Actual source code: matrix.c
1: /*
2: This is where the abstract matrix operations are defined
3: Portions of this code are under:
4: Copyright (c) 2022 Advanced Micro Devices, Inc. All rights reserved.
5: */
7: #include <petsc/private/matimpl.h>
8: #include <petsc/private/isimpl.h>
9: #include <petsc/private/vecimpl.h>
11: /* Logging support */
12: PetscClassId MAT_CLASSID;
13: PetscClassId MAT_COLORING_CLASSID;
14: PetscClassId MAT_FDCOLORING_CLASSID;
15: PetscClassId MAT_TRANSPOSECOLORING_CLASSID;
17: PetscLogEvent MAT_Mult, MAT_MultAdd, MAT_MultTranspose;
18: PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve, MAT_MatTrSolve;
19: PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
20: PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
21: PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
22: PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
23: PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
24: PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
25: PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply, MAT_Transpose, MAT_FDColoringFunction, MAT_CreateSubMat;
26: PetscLogEvent MAT_TransposeColoringCreate;
27: PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
28: PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric, MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
29: PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
30: PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
31: PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
32: PetscLogEvent MAT_MultHermitianTranspose, MAT_MultHermitianTransposeAdd;
33: PetscLogEvent MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
34: PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
35: PetscLogEvent MAT_GetMultiProcBlock;
36: PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
37: PetscLogEvent MAT_HIPSPARSECopyToGPU, MAT_HIPSPARSECopyFromGPU, MAT_HIPSPARSEGenerateTranspose, MAT_HIPSPARSESolveAnalysis;
38: PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
39: PetscLogEvent MAT_SetValuesBatch;
40: PetscLogEvent MAT_ViennaCLCopyToGPU;
41: PetscLogEvent MAT_CUDACopyToGPU;
42: PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
43: PetscLogEvent MAT_Merge, MAT_Residual, MAT_SetRandom;
44: PetscLogEvent MAT_FactorFactS, MAT_FactorInvS;
45: PetscLogEvent MATCOLORING_Apply, MATCOLORING_Comm, MATCOLORING_Local, MATCOLORING_ISCreate, MATCOLORING_SetUp, MATCOLORING_Weights;
46: PetscLogEvent MAT_H2Opus_Build, MAT_H2Opus_Compress, MAT_H2Opus_Orthog, MAT_H2Opus_LR;
48: const char *const MatFactorTypes[] = {"NONE", "LU", "CHOLESKY", "ILU", "ICC", "ILUDT", "QR", "MatFactorType", "MAT_FACTOR_", NULL};
50: /*@
51: MatSetRandom - Sets all components of a matrix to random numbers.
53: Logically Collective
55: Input Parameters:
56: + x - the matrix
57: - rctx - the `PetscRandom` object, formed by `PetscRandomCreate()`, or `NULL`,
58:          in which case one is created (and destroyed) internally.
60: Example:
61: .vb
62: PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
63: MatSetRandom(x,rctx);
64: PetscRandomDestroy(&rctx);
65: .ve
67: Level: intermediate
69: Notes:
70: For sparse matrices that have been preallocated but not yet assembled, it randomly selects appropriate locations;
72: for sparse matrices that already have nonzero locations, it fills those locations with random numbers.
74: It generates an error if used on unassembled sparse matrices that have not been preallocated.
76: .seealso: [](ch_matrices), `Mat`, `PetscRandom`, `PetscRandomCreate()`, `MatZeroEntries()`, `MatSetValues()`, `PetscRandomDestroy()`
77: @*/
78: PetscErrorCode MatSetRandom(Mat x, PetscRandom rctx)
79: {
80: PetscRandom randObj = NULL;
82: PetscFunctionBegin;
86: MatCheckPreallocated(x, 1);
88: if (!rctx) {
89: MPI_Comm comm;
90: PetscCall(PetscObjectGetComm((PetscObject)x, &comm));
91: PetscCall(PetscRandomCreate(comm, &randObj));
92: PetscCall(PetscRandomSetType(randObj, x->defaultrandtype));
93: PetscCall(PetscRandomSetFromOptions(randObj));
94: rctx = randObj;
95: }
96: PetscCall(PetscLogEventBegin(MAT_SetRandom, x, rctx, 0, 0));
97: PetscUseTypeMethod(x, setrandom, rctx);
98: PetscCall(PetscLogEventEnd(MAT_SetRandom, x, rctx, 0, 0));
100: PetscCall(MatAssemblyBegin(x, MAT_FINAL_ASSEMBLY));
101: PetscCall(MatAssemblyEnd(x, MAT_FINAL_ASSEMBLY));
102: PetscCall(PetscRandomDestroy(&randObj));
103: PetscFunctionReturn(PETSC_SUCCESS);
104: }
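/*
  Editor's sketch (not part of the PETSc sources): the docstring above notes that `rctx` may be `NULL`,
  in which case MatSetRandom() creates and destroys a PetscRandom internally. A minimal use, assuming
  `A` is an already created and preallocated matrix:

     PetscCall(MatSetRandom(A, NULL));
*/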
106: /*@
107: MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in
109: Logically Collective
111: Input Parameter:
112: . mat - the factored matrix
114: Output Parameters:
115: + pivot - the pivot value computed
116: - row - the row in which the zero pivot occurred. This row value must be interpreted carefully due to row reorderings and to which processes
117: share the matrix
119: Level: advanced
121: Notes:
122: This routine does not work for factorizations done with external packages.
124: This routine should only be called if `MatGetFactorError()` returns a value of `MAT_FACTOR_NUMERIC_ZEROPIVOT`
126: This can also be called on non-factored matrices, for example matrices used in SOR.
128: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`,
129: `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`,
130: `MAT_FACTOR_NUMERIC_ZEROPIVOT`
131: @*/
132: PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat, PetscReal *pivot, PetscInt *row)
133: {
134: PetscFunctionBegin;
136: PetscAssertPointer(pivot, 2);
137: PetscAssertPointer(row, 3);
138: *pivot = mat->factorerror_zeropivot_value;
139: *row = mat->factorerror_zeropivot_row;
140: PetscFunctionReturn(PETSC_SUCCESS);
141: }
143: /*@
144: MatFactorGetError - gets the error code from a factorization
146: Logically Collective
148: Input Parameter:
149: . mat - the factored matrix
151: Output Parameter:
152: . err - the error code
154: Level: advanced
156: Note:
157: This can also be called on non-factored matrices, for example matrices used in SOR.
159: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`,
160: `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`, `MatFactorError`
161: @*/
162: PetscErrorCode MatFactorGetError(Mat mat, MatFactorError *err)
163: {
164: PetscFunctionBegin;
166: PetscAssertPointer(err, 2);
167: *err = mat->factorerrortype;
168: PetscFunctionReturn(PETSC_SUCCESS);
169: }
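/*
  Editor's sketch (not part of the PETSc sources): checking the factorization error state after a numeric
  factorization. `F` is assumed to be a factor matrix from MatGetFactor()/MatLUFactorSymbolic() and `A`
  the assumed matrix being factored.

     MatFactorInfo  info;
     MatFactorError err;
     PetscCall(MatFactorInfoInitialize(&info));
     PetscCall(MatLUFactorNumeric(F, A, &info));
     PetscCall(MatFactorGetError(F, &err));
     if (err == MAT_FACTOR_NUMERIC_ZEROPIVOT) {
       PetscReal pivot;
       PetscInt  row;
       PetscCall(MatFactorGetErrorZeroPivot(F, &pivot, &row));
       PetscCall(PetscPrintf(PETSC_COMM_SELF, "zero pivot %g in row %" PetscInt_FMT "\n", (double)pivot, row));
       PetscCall(MatFactorClearError(F));
     }
*/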
171: /*@
172: MatFactorClearError - clears the error code in a factorization
174: Logically Collective
176: Input Parameter:
177: . mat - the factored matrix
179: Level: developer
181: Note:
182: This can also be called on non-factored matrices, for example matrices used in SOR.
184: .seealso: [](ch_matrices), `Mat`, `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`,
185: `MatGetErrorCode()`, `MatFactorError`
186: @*/
187: PetscErrorCode MatFactorClearError(Mat mat)
188: {
189: PetscFunctionBegin;
191: mat->factorerrortype = MAT_FACTOR_NOERROR;
192: mat->factorerror_zeropivot_value = 0.0;
193: mat->factorerror_zeropivot_row = 0;
194: PetscFunctionReturn(PETSC_SUCCESS);
195: }
197: PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat, PetscBool cols, PetscReal tol, IS *nonzero)
198: {
199: Vec r, l;
200: const PetscScalar *al;
201: PetscInt i, nz, gnz, N, n, st;
203: PetscFunctionBegin;
204: PetscCall(MatCreateVecs(mat, &r, &l));
205: if (!cols) { /* nonzero rows */
206: PetscCall(MatGetOwnershipRange(mat, &st, NULL));
207: PetscCall(MatGetSize(mat, &N, NULL));
208: PetscCall(MatGetLocalSize(mat, &n, NULL));
209: PetscCall(VecSet(l, 0.0));
210: PetscCall(VecSetRandom(r, NULL));
211: PetscCall(MatMult(mat, r, l));
212: PetscCall(VecGetArrayRead(l, &al));
213: } else { /* nonzero columns */
214: PetscCall(MatGetOwnershipRangeColumn(mat, &st, NULL));
215: PetscCall(MatGetSize(mat, NULL, &N));
216: PetscCall(MatGetLocalSize(mat, NULL, &n));
217: PetscCall(VecSet(r, 0.0));
218: PetscCall(VecSetRandom(l, NULL));
219: PetscCall(MatMultTranspose(mat, l, r));
220: PetscCall(VecGetArrayRead(r, &al));
221: }
222: if (tol <= 0.0) {
223: for (i = 0, nz = 0; i < n; i++)
224: if (al[i] != 0.0) nz++;
225: } else {
226: for (i = 0, nz = 0; i < n; i++)
227: if (PetscAbsScalar(al[i]) > tol) nz++;
228: }
229: PetscCall(MPIU_Allreduce(&nz, &gnz, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
230: if (gnz != N) {
231: PetscInt *nzr;
232: PetscCall(PetscMalloc1(nz, &nzr));
233: if (nz) {
234: if (tol < 0) {
235: for (i = 0, nz = 0; i < n; i++)
236: if (al[i] != 0.0) nzr[nz++] = i + st;
237: } else {
238: for (i = 0, nz = 0; i < n; i++)
239: if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i + st;
240: }
241: }
242: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nz, nzr, PETSC_OWN_POINTER, nonzero));
243: } else *nonzero = NULL;
244: if (!cols) { /* nonzero rows */
245: PetscCall(VecRestoreArrayRead(l, &al));
246: } else {
247: PetscCall(VecRestoreArrayRead(r, &al));
248: }
249: PetscCall(VecDestroy(&l));
250: PetscCall(VecDestroy(&r));
251: PetscFunctionReturn(PETSC_SUCCESS);
252: }
254: /*@
255: MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix
257: Input Parameter:
258: . mat - the matrix
260: Output Parameter:
261: . keptrows - the rows that are not completely zero
263: Level: intermediate
265: Note:
266: `keptrows` is set to `NULL` if all rows are nonzero.
268: .seealso: [](ch_matrices), `Mat`, `MatFindZeroRows()`
269: @*/
270: PetscErrorCode MatFindNonzeroRows(Mat mat, IS *keptrows)
271: {
272: PetscFunctionBegin;
275: PetscAssertPointer(keptrows, 2);
276: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
277: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
278: if (mat->ops->findnonzerorows) PetscUseTypeMethod(mat, findnonzerorows, keptrows);
279: else PetscCall(MatFindNonzeroRowsOrCols_Basic(mat, PETSC_FALSE, 0.0, keptrows));
280: PetscFunctionReturn(PETSC_SUCCESS);
281: }
283: /*@
284: MatFindZeroRows - Locate all rows that are completely zero in the matrix
286: Input Parameter:
287: . mat - the matrix
289: Output Parameter:
290: . zerorows - the rows that are completely zero
292: Level: intermediate
294: Note:
295: `zerorows` is set to `NULL` if no rows are zero.
297: .seealso: [](ch_matrices), `Mat`, `MatFindNonzeroRows()`
298: @*/
299: PetscErrorCode MatFindZeroRows(Mat mat, IS *zerorows)
300: {
301: IS keptrows;
302: PetscInt m, n;
304: PetscFunctionBegin;
307: PetscAssertPointer(zerorows, 2);
308: PetscCall(MatFindNonzeroRows(mat, &keptrows));
309: /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
310: In keeping with this convention, we set zerorows to NULL if there are no zero
311: rows. */
312: if (keptrows == NULL) {
313: *zerorows = NULL;
314: } else {
315: PetscCall(MatGetOwnershipRange(mat, &m, &n));
316: PetscCall(ISComplement(keptrows, m, n, zerorows));
317: PetscCall(ISDestroy(&keptrows));
318: }
319: PetscFunctionReturn(PETSC_SUCCESS);
320: }
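/*
  Editor's sketch (not part of the PETSc sources): reporting the completely zero rows of an assembled
  matrix `A` (an assumed variable). Note that `zerorows` is NULL when no rows are zero.

     IS zerorows;
     PetscCall(MatFindZeroRows(A, &zerorows));
     if (zerorows) {
       PetscCall(ISView(zerorows, PETSC_VIEWER_STDOUT_WORLD));
       PetscCall(ISDestroy(&zerorows));
     }
*/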
322: /*@
323: MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
325: Not Collective
327: Input Parameter:
328: . A - the matrix
330: Output Parameter:
331: . a - the diagonal part (which is a SEQUENTIAL matrix)
333: Level: advanced
335: Notes:
336: See `MatCreateAIJ()` for more information on the "diagonal part" of the matrix.
338: Use caution, as the reference count on the returned matrix is not incremented and it is used as part of `A`'s normal operation.
340: .seealso: [](ch_matrices), `Mat`, `MatCreateAIJ()`, `MATAIJ`, `MATBAIJ`, `MATSBAIJ`
341: @*/
342: PetscErrorCode MatGetDiagonalBlock(Mat A, Mat *a)
343: {
344: PetscFunctionBegin;
347: PetscAssertPointer(a, 2);
348: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
349: if (A->ops->getdiagonalblock) PetscUseTypeMethod(A, getdiagonalblock, a);
350: else {
351: PetscMPIInt size;
353: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
354: PetscCheck(size == 1, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Not for parallel matrix type %s", ((PetscObject)A)->type_name);
355: *a = A;
356: }
357: PetscFunctionReturn(PETSC_SUCCESS);
358: }
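/*
  Editor's sketch (not part of the PETSc sources): viewing the on-process (diagonal) block of a parallel
  matrix `A` (an assumed variable). The returned matrix is borrowed, so it must not be destroyed.

     Mat Ad;
     PetscCall(MatGetDiagonalBlock(A, &Ad));
     PetscCall(MatView(Ad, PETSC_VIEWER_STDOUT_SELF));
*/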
360: /*@
361: MatGetTrace - Gets the trace of a matrix, that is, the sum of its diagonal entries.
363: Collective
365: Input Parameter:
366: . mat - the matrix
368: Output Parameter:
369: . trace - the sum of the diagonal entries
371: Level: advanced
373: .seealso: [](ch_matrices), `Mat`
374: @*/
375: PetscErrorCode MatGetTrace(Mat mat, PetscScalar *trace)
376: {
377: Vec diag;
379: PetscFunctionBegin;
381: PetscAssertPointer(trace, 2);
382: PetscCall(MatCreateVecs(mat, &diag, NULL));
383: PetscCall(MatGetDiagonal(mat, diag));
384: PetscCall(VecSum(diag, trace));
385: PetscCall(VecDestroy(&diag));
386: PetscFunctionReturn(PETSC_SUCCESS);
387: }
389: /*@
390: MatRealPart - Zeros out the imaginary part of the matrix
392: Logically Collective
394: Input Parameter:
395: . mat - the matrix
397: Level: advanced
399: .seealso: [](ch_matrices), `Mat`, `MatImaginaryPart()`
400: @*/
401: PetscErrorCode MatRealPart(Mat mat)
402: {
403: PetscFunctionBegin;
406: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
407: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
408: MatCheckPreallocated(mat, 1);
409: PetscUseTypeMethod(mat, realpart);
410: PetscFunctionReturn(PETSC_SUCCESS);
411: }
413: /*@C
414: MatGetGhosts - Get the global indices of all ghost nodes defined by the sparse matrix
416: Collective
418: Input Parameter:
419: . mat - the matrix
421: Output Parameters:
422: + nghosts - number of ghosts (for `MATBAIJ` and `MATSBAIJ` matrices there is one ghost for each matrix block)
423: - ghosts - the global indices of the ghost points
425: Level: advanced
427: Note:
428: `nghosts` and `ghosts` are suitable to pass into `VecCreateGhost()` or `VecCreateGhostBlock()`
430: .seealso: [](ch_matrices), `Mat`, `VecCreateGhost()`, `VecCreateGhostBlock()`
431: @*/
432: PetscErrorCode MatGetGhosts(Mat mat, PetscInt *nghosts, const PetscInt *ghosts[])
433: {
434: PetscFunctionBegin;
437: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
438: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
439: if (mat->ops->getghosts) PetscUseTypeMethod(mat, getghosts, nghosts, ghosts);
440: else {
441: if (nghosts) *nghosts = 0;
442: if (ghosts) *ghosts = NULL;
443: }
444: PetscFunctionReturn(PETSC_SUCCESS);
445: }
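/*
  Editor's sketch (not part of the PETSc sources): using the ghost information of a matrix `A` (an assumed
  variable) to create a compatible ghosted vector, as suggested in the note above.

     PetscInt        nghosts, n;
     const PetscInt *ghosts;
     Vec             v;
     PetscCall(MatGetGhosts(A, &nghosts, &ghosts));
     PetscCall(MatGetLocalSize(A, NULL, &n));
     PetscCall(VecCreateGhost(PetscObjectComm((PetscObject)A), n, PETSC_DECIDE, nghosts, ghosts, &v));
*/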
447: /*@
448: MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part
450: Logically Collective
452: Input Parameter:
453: . mat - the matrix
455: Level: advanced
457: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`
458: @*/
459: PetscErrorCode MatImaginaryPart(Mat mat)
460: {
461: PetscFunctionBegin;
464: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
465: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
466: MatCheckPreallocated(mat, 1);
467: PetscUseTypeMethod(mat, imaginarypart);
468: PetscFunctionReturn(PETSC_SUCCESS);
469: }
471: /*@
472: MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for `MATBAIJ` and `MATSBAIJ` matrices) in the nonzero structure
474: Not Collective
476: Input Parameter:
477: . mat - the matrix
479: Output Parameters:
480: + missing - is any diagonal entry missing
481: - dd - first diagonal entry that is missing on this process (optional)
483: Level: advanced
485: Note:
486: This does not return diagonal entries that are in the nonzero structure but happen to have a zero numerical value
488: .seealso: [](ch_matrices), `Mat`
489: @*/
490: PetscErrorCode MatMissingDiagonal(Mat mat, PetscBool *missing, PetscInt *dd)
491: {
492: PetscFunctionBegin;
495: PetscAssertPointer(missing, 2);
496: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix %s", ((PetscObject)mat)->type_name);
497: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
498: PetscUseTypeMethod(mat, missingdiagonal, missing, dd);
499: PetscFunctionReturn(PETSC_SUCCESS);
500: }
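/*
  Editor's sketch (not part of the PETSc sources): reporting a missing diagonal entry in the nonzero
  structure of a matrix `A` (an assumed variable).

     PetscBool missing;
     PetscInt  dd;
     PetscCall(MatMissingDiagonal(A, &missing, &dd));
     if (missing) PetscCall(PetscPrintf(PETSC_COMM_SELF, "first missing diagonal entry on this process: row %" PetscInt_FMT "\n", dd));
*/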
502: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
503: /*@C
504: MatGetRow - Gets a row of a matrix. You MUST call `MatRestoreRow()`
505: for each row that you get to ensure that your application does
506: not bleed memory.
508: Not Collective
510: Input Parameters:
511: + mat - the matrix
512: - row - the row to get
514: Output Parameters:
515: + ncols - if not `NULL`, the number of nonzeros in `row`
516: . cols - if not `NULL`, the column numbers
517: - vals - if not `NULL`, the numerical values
519: Level: advanced
521: Notes:
522: This routine is provided for people who need to have direct access
523: to the structure of a matrix. We hope that we provide enough
524: high-level matrix routines that few users will need it.
526: `MatGetRow()` always returns 0-based column indices, regardless of
527: whether the internal representation is 0-based (default) or 1-based.
529: For better efficiency, set `cols` and/or `vals` to `NULL` if you do
530: not wish to extract these quantities.
532: The user can only examine the values extracted with `MatGetRow()`;
533: the values CANNOT be altered. To change the matrix entries, one
534: must use `MatSetValues()`.
536: You can only have one call to `MatGetRow()` outstanding for a particular
537: matrix at a time, per processor. `MatGetRow()` can only obtain rows
538: associated with the given processor, it cannot get rows from the
539: other processors; for that we suggest using `MatCreateSubMatrices()`, then
540: `MatGetRow()` on the submatrix. The row index passed to `MatGetRow()`
541: is in the global numbering of rows.
543: Use `MatGetRowIJ()` and `MatRestoreRowIJ()` to access all the local indices of the sparse matrix.
545: Use `MatSeqAIJGetArray()` and similar functions to access the numerical values for certain matrix types directly.
547: Fortran Note:
548: The calling sequence is
549: .vb
550: MatGetRow(matrix,row,ncols,cols,values,ierr)
551: Mat matrix (input)
552: integer row (input)
553: integer ncols (output)
554: integer cols(maxcols) (output)
555: double precision (or double complex) values(maxcols) (output)
556: .ve
557: where maxcols >= maximum nonzeros in any row of the matrix.
559: .seealso: [](ch_matrices), `Mat`, `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`, `MatGetRowIJ()`, `MatRestoreRowIJ()`
560: @*/
561: PetscErrorCode MatGetRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
562: {
563: PetscInt incols;
565: PetscFunctionBegin;
568: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
569: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
570: MatCheckPreallocated(mat, 1);
571: PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")", row, mat->rmap->rstart, mat->rmap->rend);
572: PetscCall(PetscLogEventBegin(MAT_GetRow, mat, 0, 0, 0));
573: PetscUseTypeMethod(mat, getrow, row, &incols, (PetscInt **)cols, (PetscScalar **)vals);
574: if (ncols) *ncols = incols;
575: PetscCall(PetscLogEventEnd(MAT_GetRow, mat, 0, 0, 0));
576: PetscFunctionReturn(PETSC_SUCCESS);
577: }
579: /*@
580: MatConjugate - replaces the matrix values with their complex conjugates
582: Logically Collective
584: Input Parameter:
585: . mat - the matrix
587: Level: advanced
589: .seealso: [](ch_matrices), `Mat`, `MatRealPart()`, `MatImaginaryPart()`, `VecConjugate()`, `MatTranspose()`
590: @*/
591: PetscErrorCode MatConjugate(Mat mat)
592: {
593: PetscFunctionBegin;
595: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
596: if (PetscDefined(USE_COMPLEX) && mat->hermitian != PETSC_BOOL3_TRUE) {
597: PetscUseTypeMethod(mat, conjugate);
598: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
599: }
600: PetscFunctionReturn(PETSC_SUCCESS);
601: }
603: /*@C
604: MatRestoreRow - Frees any temporary space allocated by `MatGetRow()`.
606: Not Collective
608: Input Parameters:
609: + mat - the matrix
610: . row - the row to get
611: . ncols - the number of nonzeros
612: . cols - the columns of the nonzeros
613: - vals - if not `NULL`, the values of the nonzeros
615: Level: advanced
617: Notes:
618: This routine should be called after you have finished examining the entries.
620: This routine zeros out `ncols`, `cols`, and `vals`. This is to prevent accidental
621: use of the arrays after they have been restored. If you pass `NULL`, it will
622: not zero the pointers. Use of `cols` or `vals` after `MatRestoreRow()` is invalid.
624: Fortran Notes:
625: The calling sequence is
626: .vb
627: MatRestoreRow(matrix,row,ncols,cols,values,ierr)
628: Mat matrix (input)
629: integer row (input)
630: integer ncols (output)
631: integer cols(maxcols) (output)
632: double precision (or double complex) values(maxcols) (output)
633: .ve
634: where maxcols >= maximum nonzeros in any row of the matrix.
636: In Fortran `MatRestoreRow()` MUST be called after `MatGetRow()`
637: before another call to `MatGetRow()` can be made.
639: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`
640: @*/
641: PetscErrorCode MatRestoreRow(Mat mat, PetscInt row, PetscInt *ncols, const PetscInt *cols[], const PetscScalar *vals[])
642: {
643: PetscFunctionBegin;
645: if (ncols) PetscAssertPointer(ncols, 3);
646: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
647: if (!mat->ops->restorerow) PetscFunctionReturn(PETSC_SUCCESS);
648: PetscUseTypeMethod(mat, restorerow, row, ncols, (PetscInt **)cols, (PetscScalar **)vals);
649: if (ncols) *ncols = 0;
650: if (cols) *cols = NULL;
651: if (vals) *vals = NULL;
652: PetscFunctionReturn(PETSC_SUCCESS);
653: }
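/*
  Editor's sketch (not part of the PETSc sources): the usual row-traversal pattern with MatGetRow()/
  MatRestoreRow() over the locally owned rows of a matrix `A` (an assumed variable).

     PetscInt           rstart, rend, row, ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
     for (row = rstart; row < rend; row++) {
       PetscCall(MatGetRow(A, row, &ncols, &cols, &vals));
       // examine, but do not modify, cols[0..ncols-1] and vals[0..ncols-1]
       PetscCall(MatRestoreRow(A, row, &ncols, &cols, &vals));
     }
*/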
655: /*@
656: MatGetRowUpperTriangular - Sets a flag to enable calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
657: You should call `MatRestoreRowUpperTriangular()` after calling `MatGetRow()` and `MatRestoreRow()` to disable the flag.
659: Not Collective
661: Input Parameter:
662: . mat - the matrix
664: Level: advanced
666: Note:
667: The flag is to ensure that users are aware that `MatGetRow()` only provides the upper triangular part of the row for the matrices in `MATSBAIJ` format.
669: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatRestoreRowUpperTriangular()`
670: @*/
671: PetscErrorCode MatGetRowUpperTriangular(Mat mat)
672: {
673: PetscFunctionBegin;
676: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
677: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
678: MatCheckPreallocated(mat, 1);
679: if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
680: PetscUseTypeMethod(mat, getrowuppertriangular);
681: PetscFunctionReturn(PETSC_SUCCESS);
682: }
684: /*@
685: MatRestoreRowUpperTriangular - Disables calls to `MatGetRow()` for matrices in `MATSBAIJ` format.
687: Not Collective
689: Input Parameter:
690: . mat - the matrix
692: Level: advanced
694: Note:
695: This routine should be called after you have finished calls to `MatGetRow()` and `MatRestoreRow()`.
697: .seealso: [](ch_matrices), `Mat`, `MATSBAIJ`, `MatGetRowUpperTriangular()`
698: @*/
699: PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
700: {
701: PetscFunctionBegin;
704: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
705: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
706: MatCheckPreallocated(mat, 1);
707: if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(PETSC_SUCCESS);
708: PetscUseTypeMethod(mat, restorerowuppertriangular);
709: PetscFunctionReturn(PETSC_SUCCESS);
710: }
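/*
  Editor's sketch (not part of the PETSc sources): bracketing MatGetRow()/MatRestoreRow() with the
  upper-triangular flag for a MATSBAIJ matrix `S` (an assumed variable); `row` is an assumed, locally
  owned global row index, and only the upper triangular part of that row is returned.

     PetscInt           ncols;
     const PetscInt    *cols;
     const PetscScalar *vals;
     PetscCall(MatGetRowUpperTriangular(S));
     PetscCall(MatGetRow(S, row, &ncols, &cols, &vals));
     PetscCall(MatRestoreRow(S, row, &ncols, &cols, &vals));
     PetscCall(MatRestoreRowUpperTriangular(S));
*/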
712: /*@C
713: MatSetOptionsPrefix - Sets the prefix used for searching for all
714: `Mat` options in the database.
716: Logically Collective
718: Input Parameters:
719: + A - the matrix
720: - prefix - the prefix to prepend to all option names
722: Level: advanced
724: Notes:
725: A hyphen (-) must NOT be given at the beginning of the prefix name.
726: The first character of all runtime options is AUTOMATICALLY the hyphen.
728: This is NOT used for options for the factorization of the matrix. Normally the
729: prefix is automatically passed in from the PC calling the factorization. To set
730: it directly use `MatSetOptionsPrefixFactor()`
732: .seealso: [](ch_matrices), `Mat`, `MatSetFromOptions()`, `MatSetOptionsPrefixFactor()`
733: @*/
734: PetscErrorCode MatSetOptionsPrefix(Mat A, const char prefix[])
735: {
736: PetscFunctionBegin;
738: PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A, prefix));
739: PetscFunctionReturn(PETSC_SUCCESS);
740: }
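/*
  Editor's sketch (not part of the PETSc sources): giving a matrix `A` (an assumed variable) the
  illustrative prefix "sys1_", so that its options are selected with, e.g., -sys1_mat_view.

     PetscCall(MatSetOptionsPrefix(A, "sys1_"));
     PetscCall(MatSetFromOptions(A));
*/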
742: /*@C
743: MatSetOptionsPrefixFactor - Sets the prefix used for searching for all matrix factor options in the database
744: for matrices created with `MatGetFactor()`
746: Logically Collective
748: Input Parameters:
749: + A - the matrix
750: - prefix - the prefix to prepend to all option names for the factored matrix
752: Level: developer
754: Notes:
755: A hyphen (-) must NOT be given at the beginning of the prefix name.
756: The first character of all runtime options is AUTOMATICALLY the hyphen.
758: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
759: it directly when not using `KSP`/`PC` use `MatSetOptionsPrefixFactor()`
761: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSetFromOptions()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`
762: @*/
763: PetscErrorCode MatSetOptionsPrefixFactor(Mat A, const char prefix[])
764: {
765: PetscFunctionBegin;
767: if (prefix) {
768: PetscAssertPointer(prefix, 2);
769: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
770: if (prefix != A->factorprefix) {
771: PetscCall(PetscFree(A->factorprefix));
772: PetscCall(PetscStrallocpy(prefix, &A->factorprefix));
773: }
774: } else PetscCall(PetscFree(A->factorprefix));
775: PetscFunctionReturn(PETSC_SUCCESS);
776: }
778: /*@C
779: MatAppendOptionsPrefixFactor - Appends to the prefix used for searching for all matrix factor options in the database
780: for matrices created with `MatGetFactor()`
782: Logically Collective
784: Input Parameters:
785: + A - the matrix
786: - prefix - the prefix to prepend to all option names for the factored matrix
788: Level: developer
790: Notes:
791: A hyphen (-) must NOT be given at the beginning of the prefix name.
792: The first character of all runtime options is AUTOMATICALLY the hyphen.
794: Normally the prefix is automatically passed in from the `PC` calling the factorization. To set
795: it directly when not using `KSP`/`PC` use `MatAppendOptionsPrefixFactor()`
797: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `PetscOptionsCreate()`, `PetscOptionsDestroy()`, `PetscObjectSetOptionsPrefix()`, `PetscObjectPrependOptionsPrefix()`,
798: `PetscObjectGetOptionsPrefix()`, `TSAppendOptionsPrefix()`, `SNESAppendOptionsPrefix()`, `KSPAppendOptionsPrefix()`, `MatSetOptionsPrefixFactor()`,
799: `MatSetOptionsPrefix()`
800: @*/
801: PetscErrorCode MatAppendOptionsPrefixFactor(Mat A, const char prefix[])
802: {
803: size_t len1, len2, new_len;
805: PetscFunctionBegin;
807: if (!prefix) PetscFunctionReturn(PETSC_SUCCESS);
808: if (!A->factorprefix) {
809: PetscCall(MatSetOptionsPrefixFactor(A, prefix));
810: PetscFunctionReturn(PETSC_SUCCESS);
811: }
812: PetscCheck(prefix[0] != '-', PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONG, "Options prefix should not begin with a hyphen");
814: PetscCall(PetscStrlen(A->factorprefix, &len1));
815: PetscCall(PetscStrlen(prefix, &len2));
816: new_len = len1 + len2 + 1;
817: PetscCall(PetscRealloc(new_len * sizeof(*(A->factorprefix)), &A->factorprefix));
818: PetscCall(PetscStrncpy(A->factorprefix + len1, prefix, len2 + 1));
819: PetscFunctionReturn(PETSC_SUCCESS);
820: }
822: /*@C
823: MatAppendOptionsPrefix - Appends to the prefix used for searching for all
824: matrix options in the database.
826: Logically Collective
828: Input Parameters:
829: + A - the matrix
830: - prefix - the prefix to prepend to all option names
832: Level: advanced
834: Note:
835: A hyphen (-) must NOT be given at the beginning of the prefix name.
836: The first character of all runtime options is AUTOMATICALLY the hyphen.
838: .seealso: [](ch_matrices), `Mat`, `MatGetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefix()`
839: @*/
840: PetscErrorCode MatAppendOptionsPrefix(Mat A, const char prefix[])
841: {
842: PetscFunctionBegin;
844: PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A, prefix));
845: PetscFunctionReturn(PETSC_SUCCESS);
846: }
848: /*@C
849: MatGetOptionsPrefix - Gets the prefix used for searching for all
850: matrix options in the database.
852: Not Collective
854: Input Parameter:
855: . A - the matrix
857: Output Parameter:
858: . prefix - pointer to the prefix string used
860: Level: advanced
862: Fortran Note:
863: The user should pass in a string `prefix` of
864: sufficient length to hold the prefix.
866: .seealso: [](ch_matrices), `Mat`, `MatAppendOptionsPrefix()`, `MatSetOptionsPrefix()`, `MatAppendOptionsPrefixFactor()`, `MatSetOptionsPrefixFactor()`
867: @*/
868: PetscErrorCode MatGetOptionsPrefix(Mat A, const char *prefix[])
869: {
870: PetscFunctionBegin;
872: PetscAssertPointer(prefix, 2);
873: PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A, prefix));
874: PetscFunctionReturn(PETSC_SUCCESS);
875: }
877: /*@
878: MatResetPreallocation - Reset matrix to use the original nonzero pattern provided by the user.
880: Collective
882: Input Parameter:
883: . A - the matrix
885: Level: beginner
887: Notes:
888: The allocated memory will be shrunk after calling `MatAssemblyBegin()` and `MatAssemblyEnd()` with `MAT_FINAL_ASSEMBLY`.
890: Users can reset the preallocation to access the original memory.
892: Currently only supported for `MATAIJ` matrices.
894: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
895: @*/
896: PetscErrorCode MatResetPreallocation(Mat A)
897: {
898: PetscFunctionBegin;
901: PetscCheck(A->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot reset preallocation after setting some values but not yet calling MatAssemblyBegin()/MatAssemblyEnd()");
902: if (A->num_ass == 0) PetscFunctionReturn(PETSC_SUCCESS);
903: PetscUseMethod(A, "MatResetPreallocation_C", (Mat), (A));
904: PetscFunctionReturn(PETSC_SUCCESS);
905: }
907: /*@
908: MatSetUp - Sets up the internal matrix data structures for later use.
910: Collective
912: Input Parameter:
913: . A - the matrix
915: Level: intermediate
917: Notes:
918: If the user has not set preallocation for this matrix then an efficient algorithm will be used for the first round of
919: setting values in the matrix.
921: This routine is called internally by other matrix functions when needed, so it rarely needs to be called by users.
923: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatCreate()`, `MatDestroy()`, `MatXAIJSetPreallocation()`
924: @*/
925: PetscErrorCode MatSetUp(Mat A)
926: {
927: PetscFunctionBegin;
929: if (!((PetscObject)A)->type_name) {
930: PetscMPIInt size;
932: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
933: PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
934: }
935: if (!A->preallocated) PetscTryTypeMethod(A, setup);
936: PetscCall(PetscLayoutSetUp(A->rmap));
937: PetscCall(PetscLayoutSetUp(A->cmap));
938: A->preallocated = PETSC_TRUE;
939: PetscFunctionReturn(PETSC_SUCCESS);
940: }
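/*
  Editor's sketch (not part of the PETSc sources): creating a matrix without explicit preallocation;
  after MatSetUp() values may be set directly. The global size 100 is illustrative.

     Mat A;
     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, 100, 100));
     PetscCall(MatSetFromOptions(A));
     PetscCall(MatSetUp(A));
*/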
942: #if defined(PETSC_HAVE_SAWS)
943: #include <petscviewersaws.h>
944: #endif
946: /*
947: If thread safety is on, extraneous matrices may be printed
949: This flag cannot be stored in the matrix because the original matrix in MatView() may assemble a new matrix which is passed into MatViewFromOptions()
950: */
951: #if !defined(PETSC_HAVE_THREADSAFETY)
952: static PetscInt insidematview = 0;
953: #endif
955: /*@C
956: MatViewFromOptions - View properties of the matrix based on options set in the options database
958: Collective
960: Input Parameters:
961: + A - the matrix
962: . obj - optional additional object that provides the options prefix to use
963: - name - command line option
965: Options Database Key:
966: . -mat_view [viewertype]:... - the viewer and its options
968: Level: intermediate
970: Note:
971: .vb
972: If no value is provided ascii:stdout is used
973: ascii[:[filename][:[format][:append]]] defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
974: for example ascii::ascii_info prints just the information about the object not all details
975: unless :append is given, the file opens in write mode, overwriting what was already there
976: binary[:[filename][:[format][:append]]] defaults to the file binaryoutput
977: draw[:drawtype[:filename]] for example, draw:tikz, draw:tikz:figure.tex or draw:x
978: socket[:port] defaults to the standard output port
979: saws[:communicatorname] publishes object to the Scientific Application Webserver (SAWs)
980: .ve
982: .seealso: [](ch_matrices), `Mat`, `MatView()`, `PetscObjectViewFromOptions()`, `MatCreate()`
983: @*/
984: PetscErrorCode MatViewFromOptions(Mat A, PetscObject obj, const char name[])
985: {
986: PetscFunctionBegin;
988: #if !defined(PETSC_HAVE_THREADSAFETY)
989: if (insidematview) PetscFunctionReturn(PETSC_SUCCESS);
990: #endif
991: PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
992: PetscFunctionReturn(PETSC_SUCCESS);
993: }
995: /*@C
996: MatView - displays information about a matrix in a variety of ways
998: Collective
1000: Input Parameters:
1001: + mat - the matrix
1002: - viewer - visualization context
1004: Options Database Keys:
1005: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
1006: . -mat_view ::ascii_info_detail - Prints more detailed info
1007: . -mat_view - Prints matrix in ASCII format
1008: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
1009: . -mat_view draw - PetscDraws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
1010: . -display <name> - Sets display name (default is host)
1011: . -draw_pause <sec> - Sets number of seconds to pause after display
1012: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (see Users-Manual: ch_matlab for details)
1013: . -viewer_socket_machine <machine> - -
1014: . -viewer_socket_port <port> - -
1015: . -mat_view binary - save matrix to file in binary format
1016: - -viewer_binary_filename <name> - -
1018: Level: beginner
1020: Notes:
1021: The available visualization contexts include
1022: + `PETSC_VIEWER_STDOUT_SELF` - for sequential matrices
1023: . `PETSC_VIEWER_STDOUT_WORLD` - for parallel matrices created on `PETSC_COMM_WORLD`
1024: . `PETSC_VIEWER_STDOUT_`(comm) - for matrices created on MPI communicator comm
1025: - `PETSC_VIEWER_DRAW_WORLD` - graphical display of nonzero structure
1027: The user can open alternative visualization contexts with
1028: + `PetscViewerASCIIOpen()` - Outputs matrix to a specified file
1029: . `PetscViewerBinaryOpen()` - Outputs matrix in binary to a
1030: specified file; corresponding input uses `MatLoad()`
1031: . `PetscViewerDrawOpen()` - Outputs nonzero matrix structure to
1032: an X window display
1033: - `PetscViewerSocketOpen()` - Outputs matrix to Socket viewer.
1034: Currently only the `MATSEQDENSE` and `MATAIJ`
1035: matrix types support the Socket viewer.
1037: The user can call `PetscViewerPushFormat()` to specify the output
1038: format of ASCII printed objects (when using `PETSC_VIEWER_STDOUT_SELF`,
1039: `PETSC_VIEWER_STDOUT_WORLD` and `PetscViewerASCIIOpen()`). Available formats include
1040: + `PETSC_VIEWER_DEFAULT` - default, prints matrix contents
1041: . `PETSC_VIEWER_ASCII_MATLAB` - prints matrix contents in MATLAB format
1042: . `PETSC_VIEWER_ASCII_DENSE` - prints entire matrix including zeros
1043: . `PETSC_VIEWER_ASCII_COMMON` - prints matrix contents, using a sparse
1044: format common among all matrix types
1045: . `PETSC_VIEWER_ASCII_IMPL` - prints matrix contents, using an implementation-specific
1046: format (which is in many cases the same as the default)
1047: . `PETSC_VIEWER_ASCII_INFO` - prints basic information about the matrix
1048: size and structure (not the matrix entries)
1049: - `PETSC_VIEWER_ASCII_INFO_DETAIL` - prints more detailed information about
1050: the matrix structure
1052: The ASCII viewers are only recommended for small matrices on at most a moderate number of processes;
1053: the program will seemingly hang and take hours for larger matrices, for which one should use the binary format.
1055: In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
1057: See the manual page for `MatLoad()` for the exact format of the binary file when the binary
1058: viewer is used.
1060: See share/petsc/matlab/PetscBinaryRead.m for a MATLAB code that can read in the binary file when the binary
1061: viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
1063: One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
1064: and then use the following mouse functions.
1065: .vb
1066: left mouse: zoom in
1067: middle mouse: zoom out
1068: right mouse: continue with the simulation
1069: .ve
1071: .seealso: [](ch_matrices), `Mat`, `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`, `PetscViewer`,
1072: `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`, `MatViewFromOptions()`
1073: @*/
1074: PetscErrorCode MatView(Mat mat, PetscViewer viewer)
1075: {
1076: PetscInt rows, cols, rbs, cbs;
1077: PetscBool isascii, isstring, issaws;
1078: PetscViewerFormat format;
1079: PetscMPIInt size;
1081: PetscFunctionBegin;
1084: if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat), &viewer));
1086: PetscCheckSameComm(mat, 1, viewer, 2);
1088: PetscCall(PetscViewerGetFormat(viewer, &format));
1089: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
1090: if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(PETSC_SUCCESS);
1092: #if !defined(PETSC_HAVE_THREADSAFETY)
1093: insidematview++;
1094: #endif
1095: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSTRING, &isstring));
1096: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
1097: PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERSAWS, &issaws));
1098: PetscCheck((isascii && (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) || !mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "No viewers for factored matrix except ASCII, info, or info_detail");
1100: PetscCall(PetscLogEventBegin(MAT_View, mat, viewer, 0, 0));
1101: if (isascii) {
1102: if (!mat->preallocated) {
1103: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been preallocated yet\n"));
1104: #if !defined(PETSC_HAVE_THREADSAFETY)
1105: insidematview--;
1106: #endif
1107: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1108: PetscFunctionReturn(PETSC_SUCCESS);
1109: }
1110: if (!mat->assembled) {
1111: PetscCall(PetscViewerASCIIPrintf(viewer, "Matrix has not been assembled yet\n"));
1112: #if !defined(PETSC_HAVE_THREADSAFETY)
1113: insidematview--;
1114: #endif
1115: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1116: PetscFunctionReturn(PETSC_SUCCESS);
1117: }
1118: PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat, viewer));
1119: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1120: MatNullSpace nullsp, transnullsp;
1122: PetscCall(PetscViewerASCIIPushTab(viewer));
1123: PetscCall(MatGetSize(mat, &rows, &cols));
1124: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
1125: if (rbs != 1 || cbs != 1) {
1126: if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "\n", rows, cols, rbs, cbs));
1127: else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "\n", rows, cols, rbs));
1128: } else PetscCall(PetscViewerASCIIPrintf(viewer, "rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n", rows, cols));
1129: if (mat->factortype) {
1130: MatSolverType solver;
1131: PetscCall(MatFactorGetSolverType(mat, &solver));
1132: PetscCall(PetscViewerASCIIPrintf(viewer, "package used to perform factorization: %s\n", solver));
1133: }
1134: if (mat->ops->getinfo) {
1135: MatInfo info;
1136: PetscCall(MatGetInfo(mat, MAT_GLOBAL_SUM, &info));
1137: PetscCall(PetscViewerASCIIPrintf(viewer, "total: nonzeros=%.f, allocated nonzeros=%.f\n", info.nz_used, info.nz_allocated));
1138: if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer, "total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n", (PetscInt)info.mallocs));
1139: }
1140: PetscCall(MatGetNullSpace(mat, &nullsp));
1141: PetscCall(MatGetTransposeNullSpace(mat, &transnullsp));
1142: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached null space\n"));
1143: if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached transposed null space\n"));
1144: PetscCall(MatGetNearNullSpace(mat, &nullsp));
1145: if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer, " has attached near null space\n"));
1146: PetscCall(PetscViewerASCIIPushTab(viewer));
1147: PetscCall(MatProductView(mat, viewer));
1148: PetscCall(PetscViewerASCIIPopTab(viewer));
1149: }
1150: } else if (issaws) {
1151: #if defined(PETSC_HAVE_SAWS)
1152: PetscMPIInt rank;
1154: PetscCall(PetscObjectName((PetscObject)mat));
1155: PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
1156: if (!((PetscObject)mat)->amsmem && rank == 0) PetscCall(PetscObjectViewSAWs((PetscObject)mat, viewer));
1157: #endif
1158: } else if (isstring) {
1159: const char *type;
1160: PetscCall(MatGetType(mat, &type));
1161: PetscCall(PetscViewerStringSPrintf(viewer, " MatType: %-7.7s", type));
1162: PetscTryTypeMethod(mat, view, viewer);
1163: }
1164: if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1165: PetscCall(PetscViewerASCIIPushTab(viewer));
1166: PetscUseTypeMethod(mat, viewnative, viewer);
1167: PetscCall(PetscViewerASCIIPopTab(viewer));
1168: } else if (mat->ops->view) {
1169: PetscCall(PetscViewerASCIIPushTab(viewer));
1170: PetscUseTypeMethod(mat, view, viewer);
1171: PetscCall(PetscViewerASCIIPopTab(viewer));
1172: }
1173: if (isascii) {
1174: PetscCall(PetscViewerGetFormat(viewer, &format));
1175: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) PetscCall(PetscViewerASCIIPopTab(viewer));
1176: }
1177: PetscCall(PetscLogEventEnd(MAT_View, mat, viewer, 0, 0));
1178: #if !defined(PETSC_HAVE_THREADSAFETY)
1179: insidematview--;
1180: #endif
1181: PetscFunctionReturn(PETSC_SUCCESS);
1182: }
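/*
  Editor's sketch (not part of the PETSc sources): printing only summary information about a matrix `A`
  (an assumed variable) by pushing the ASCII_INFO format before viewing.

     PetscCall(PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO));
     PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
     PetscCall(PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD));
*/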
1184: #if defined(PETSC_USE_DEBUG)
1185: #include <../src/sys/totalview/tv_data_display.h>
1186: PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
1187: {
1188: TV_add_row("Local rows", "int", &mat->rmap->n);
1189: TV_add_row("Local columns", "int", &mat->cmap->n);
1190: TV_add_row("Global rows", "int", &mat->rmap->N);
1191: TV_add_row("Global columns", "int", &mat->cmap->N);
1192: TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
1193: return TV_format_OK;
1194: }
1195: #endif
1197: /*@C
1198: MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1199: with `MatView()`. The matrix format is determined from the options database.
1200: Generates a parallel MPI matrix if the communicator has more than one
1201: processor. The default matrix type is `MATAIJ`.
1203: Collective
1205: Input Parameters:
1206: + mat - the newly loaded matrix, this needs to have been created with `MatCreate()`
1207: or some related function before a call to `MatLoad()`
1208: - viewer - `PETSCVIEWERBINARY`/`PETSCVIEWERHDF5` file viewer
1210: Options Database Key:
1211: . -matload_block_size <bs> - set block size
1213: Level: beginner
1215: Notes:
1216: If the `Mat` type has not yet been given then `MATAIJ` is used, call `MatSetFromOptions()` on the
1217: `Mat` before calling this routine if you wish to set it from the options database.
1219: `MatLoad()` automatically loads into the options database any options
1220: given in the file filename.info where filename is the name of the file
1221: that was passed to the `PetscViewerBinaryOpen()`. The options in the info
1222: file will be ignored if you use the -viewer_binary_skip_info option.
1224: If the type or size of mat is not set before a call to `MatLoad()`, PETSc
1225: sets the default matrix type AIJ and sets the local and global sizes.
1226: If the type and/or sizes are already set, then they are used.
1228: In parallel, each processor can load a subset of rows (or the
1229: entire matrix). This routine is especially useful when a large
1230: matrix is stored on disk and only part of it is desired on each
1231: processor. For example, a parallel solver may access only some of
1232: the rows from each processor. The algorithm used here reads
1233: relatively small blocks of data rather than reading the entire
1234: matrix and then subsetting it.
1236: Viewer's `PetscViewerType` must be either `PETSCVIEWERBINARY` or `PETSCVIEWERHDF5`.
1237: Such viewer can be created using `PetscViewerBinaryOpen()` or `PetscViewerHDF5Open()`,
1238: or the sequence like
1239: .vb
1240: `PetscViewer` v;
1241: `PetscViewerCreate`(`PETSC_COMM_WORLD`,&v);
1242: `PetscViewerSetType`(v,`PETSCVIEWERBINARY`);
1243: `PetscViewerSetFromOptions`(v);
1244: `PetscViewerFileSetMode`(v,`FILE_MODE_READ`);
1245: `PetscViewerFileSetName`(v,"datafile");
1246: .ve
1247: The optional `PetscViewerSetFromOptions()` call allows overriding `PetscViewerSetType()` using the option
1248: $ -viewer_type {binary, hdf5}
1250: See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1251: and src/mat/tutorials/ex10.c with the second approach.
1253: In case of `PETSCVIEWERBINARY`, a native PETSc binary format is used. Each of the blocks
1254: is read onto MPI rank 0 and then shipped to its destination MPI rank, one after another.
1255: Multiple objects, both matrices and vectors, can be stored within the same file.
1256: Their `PetscObject` name is ignored; they are loaded in the order of their storage.
1258: Most users should not need to know the details of the binary storage
1259: format, since `MatLoad()` and `MatView()` completely hide these details.
1260: But for anyone who is interested, the standard binary matrix storage
1261: format is
1263: .vb
1264: PetscInt MAT_FILE_CLASSID
1265: PetscInt number of rows
1266: PetscInt number of columns
1267: PetscInt total number of nonzeros
1268: PetscInt *number nonzeros in each row
1269: PetscInt *column indices of all nonzeros (starting index is zero)
1270: PetscScalar *values of all nonzeros
1271: .ve
1272: If PETSc was not configured with `--with-64-bit-indices` then, of the matrices with more than `PETSC_INT_MAX` nonzeros, only `MATMPIAIJ` matrices can be
1273: stored or loaded (each MPI process's part of the matrix must have fewer than `PETSC_INT_MAX` nonzeros). Since the total nonzero count in this
1274: case will not fit in a (32-bit) `PetscInt` the value `PETSC_INT_MAX` is used for the header entry `total number of nonzeros`.
1276: PETSc automatically does the byte swapping for
1277: machines that store the bytes reversed. Thus if you write your own binary
1278: read/write routines you have to swap the bytes; see `PetscBinaryRead()`
1279: and `PetscBinaryWrite()` to see how this may be done.
1281: In case of `PETSCVIEWERHDF5`, a parallel HDF5 reader is used.
1282: Each processor's chunk is loaded independently by its owning MPI process.
1283: Multiple objects, both matrices and vectors, can be stored within the same file.
1284: They are looked up by their PetscObject name.
1286: As the MATLAB MAT-File Version 7.3 format is also an HDF5 flavor, we decided to use
1287: by default the same structure and naming of the AIJ arrays and column count
1288: within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1289: $ save example.mat A b -v7.3
1290: can be directly read by this routine (see Reference 1 for details).
1292: Depending on your MATLAB version, this format might be a default,
1293: otherwise you can set it as default in Preferences.
1295: Unless the -nocompression flag is used to save the file in MATLAB,
1296: PETSc must be configured with the ZLIB package.
1298: See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1300: This reader currently supports only real `MATSEQAIJ`, `MATMPIAIJ`, `MATSEQDENSE` and `MATMPIDENSE` matrices for `PETSCVIEWERHDF5`
1302: Corresponding `MatView()` is not yet implemented.
1304: The loaded matrix is actually a transpose of the original one in MATLAB,
1305: unless you push `PETSC_VIEWER_HDF5_MAT` format (see examples above).
1306: With this format, the matrix is automatically transposed by PETSc,
1307: unless the matrix is marked as SPD or symmetric
1308: (see `MatSetOption()`, `MAT_SPD`, `MAT_SYMMETRIC`).
1310: See MATLAB Documentation on `save()`, <https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version>
1312: .seealso: [](ch_matrices), `Mat`, `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1313: @*/
1314: PetscErrorCode MatLoad(Mat mat, PetscViewer viewer)
1315: {
1316: PetscBool flg;
1318: PetscFunctionBegin;
1322: if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat, MATAIJ));
1324: flg = PETSC_FALSE;
1325: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_symmetric", &flg, NULL));
1326: if (flg) {
1327: PetscCall(MatSetOption(mat, MAT_SYMMETRIC, PETSC_TRUE));
1328: PetscCall(MatSetOption(mat, MAT_SYMMETRY_ETERNAL, PETSC_TRUE));
1329: }
1330: flg = PETSC_FALSE;
1331: PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matload_spd", &flg, NULL));
1332: if (flg) PetscCall(MatSetOption(mat, MAT_SPD, PETSC_TRUE));
1334: PetscCall(PetscLogEventBegin(MAT_Load, mat, viewer, 0, 0));
1335: PetscUseTypeMethod(mat, load, viewer);
1336: PetscCall(PetscLogEventEnd(MAT_Load, mat, viewer, 0, 0));
1337: PetscFunctionReturn(PETSC_SUCCESS);
1338: }
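/*
  Editor's sketch (not part of the PETSc sources): loading a matrix from a PETSc binary file; the file
  name "matrix.dat" is illustrative.

     Mat         A;
     PetscViewer v;
     PetscCall(PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_READ, &v));
     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetFromOptions(A));
     PetscCall(MatLoad(A, v));
     PetscCall(PetscViewerDestroy(&v));
*/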
1340: static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1341: {
1342: Mat_Redundant *redund = *redundant;
1344: PetscFunctionBegin;
1345: if (redund) {
1346: if (redund->matseq) { /* via MatCreateSubMatrices() */
1347: PetscCall(ISDestroy(&redund->isrow));
1348: PetscCall(ISDestroy(&redund->iscol));
1349: PetscCall(MatDestroySubMatrices(1, &redund->matseq));
1350: } else {
1351: PetscCall(PetscFree2(redund->send_rank, redund->recv_rank));
1352: PetscCall(PetscFree(redund->sbuf_j));
1353: PetscCall(PetscFree(redund->sbuf_a));
1354: for (PetscInt i = 0; i < redund->nrecvs; i++) {
1355: PetscCall(PetscFree(redund->rbuf_j[i]));
1356: PetscCall(PetscFree(redund->rbuf_a[i]));
1357: }
1358: PetscCall(PetscFree4(redund->sbuf_nz, redund->rbuf_nz, redund->rbuf_j, redund->rbuf_a));
1359: }
1361: if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1362: PetscCall(PetscFree(redund));
1363: }
1364: PetscFunctionReturn(PETSC_SUCCESS);
1365: }
1367: /*@C
1368: MatDestroy - Frees space taken by a matrix.
1370: Collective
1372: Input Parameter:
1373: . A - the matrix
1375: Level: beginner
1377: Developer Note:
1378: Some special arrays of matrices are not destroyed in this routine but instead by the routines called by
1379: `MatDestroySubMatrices()`. Thus one must be sure that any changes here must also be made in those routines.
1380: `MatHeaderMerge()` and `MatHeaderReplace()` also manipulate the data in the `Mat` object and likely need changes
1381: if changes are needed here.
1383: .seealso: [](ch_matrices), `Mat`, `MatCreate()`
1384: @*/
1385: PetscErrorCode MatDestroy(Mat *A)
1386: {
1387: PetscFunctionBegin;
1388: if (!*A) PetscFunctionReturn(PETSC_SUCCESS);
1390: if (--((PetscObject)(*A))->refct > 0) {
1391: *A = NULL;
1392: PetscFunctionReturn(PETSC_SUCCESS);
1393: }
1395: /* if memory was published with SAWs then destroy it */
1396: PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
1397: PetscTryTypeMethod((*A), destroy);
1399: PetscCall(PetscFree((*A)->factorprefix));
1400: PetscCall(PetscFree((*A)->defaultvectype));
1401: PetscCall(PetscFree((*A)->defaultrandtype));
1402: PetscCall(PetscFree((*A)->bsizes));
1403: PetscCall(PetscFree((*A)->solvertype));
1404: for (PetscInt i = 0; i < MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
1405: if ((*A)->redundant && (*A)->redundant->matseq[0] == *A) (*A)->redundant->matseq[0] = NULL;
1406: PetscCall(MatDestroy_Redundant(&(*A)->redundant));
1407: PetscCall(MatProductClear(*A));
1408: PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
1409: PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
1410: PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
1411: PetscCall(MatDestroy(&(*A)->schur));
1412: PetscCall(PetscLayoutDestroy(&(*A)->rmap));
1413: PetscCall(PetscLayoutDestroy(&(*A)->cmap));
1414: PetscCall(PetscHeaderDestroy(A));
1415: PetscFunctionReturn(PETSC_SUCCESS);
1416: }
1418: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1419: /*@C
1420: MatSetValues - Inserts or adds a block of values into a matrix.
1421: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1422: MUST be called after all calls to `MatSetValues()` have been completed.
1424: Not Collective
1426: Input Parameters:
1427: + mat - the matrix
1428: . v - a logically two-dimensional array of values
1429: . m - the number of rows
1430: . idxm - the global indices of the rows
1431: . n - the number of columns
1432: . idxn - the global indices of the columns
1433: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1435: Level: beginner
1437: Notes:
1438: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1440: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1441: options cannot be mixed without intervening calls to the assembly
1442: routines.
1444: `MatSetValues()` uses 0-based row and column numbers in Fortran
1445: as well as in C.
1447: Negative indices may be passed in `idxm` and `idxn`, these rows and columns are
1448: simply ignored. This allows easily inserting element stiffness matrices
1449: with homogeneous Dirichlet boundary conditions that you don't want represented
1450: in the matrix.
1452: Efficiency Alert:
1453: The routine `MatSetValuesBlocked()` may offer much better efficiency
1454: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1456: Developer Note:
1457: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
1458: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1460: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1461: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1462: @*/
1463: PetscErrorCode MatSetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1464: {
1465: PetscFunctionBeginHot;
1468: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1469: PetscAssertPointer(idxm, 3);
1470: PetscAssertPointer(idxn, 5);
1471: MatCheckPreallocated(mat, 1);
1473: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1474: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
1476: if (PetscDefined(USE_DEBUG)) {
1477: PetscInt i, j;
1479: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1480: for (i = 0; i < m; i++) {
1481: for (j = 0; j < n; j++) {
1482: if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i * n + j]))
1483: #if defined(PETSC_USE_COMPLEX)
1484: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)PetscRealPart(v[i * n + j]), (double)PetscImaginaryPart(v[i * n + j]), idxm[i], idxn[j]);
1485: #else
1486: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FP, "Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")", (double)v[i * n + j], idxm[i], idxn[j]);
1487: #endif
1488: }
1489: }
1490: for (i = 0; i < m; i++) PetscCheck(idxm[i] < mat->rmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxm[i], mat->rmap->N - 1);
1491: for (i = 0; i < n; i++) PetscCheck(idxn[i] < mat->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT, idxn[i], mat->cmap->N - 1);
1492: }
1494: if (mat->assembled) {
1495: mat->was_assembled = PETSC_TRUE;
1496: mat->assembled = PETSC_FALSE;
1497: }
1498: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1499: PetscUseTypeMethod(mat, setvalues, m, idxm, n, idxn, v, addv);
1500: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1501: PetscFunctionReturn(PETSC_SUCCESS);
1502: }
1504: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1505: /*@C
1506: MatSetValuesIS - Inserts or adds a block of values into a matrix using an `IS` to indicate the rows and columns.
1507: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
1508: MUST be called after all calls to `MatSetValues()` have been completed.
1510: Not Collective
1512: Input Parameters:
1513: + mat - the matrix
1514: . v - a logically two-dimensional array of values
1515: . ism - the rows to provide
1516: . isn - the columns to provide
1517: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
1519: Level: beginner
1521: Notes:
1522: By default the values, `v`, are stored row-oriented. See `MatSetOption()` for other options.
1524: Calls to `MatSetValues()` with the `INSERT_VALUES` and `ADD_VALUES`
1525: options cannot be mixed without intervening calls to the assembly
1526: routines.
1528: `MatSetValues()` uses 0-based row and column numbers in Fortran
1529: as well as in C.
1531: Negative indices may be passed in `ism` and `isn`; these rows and columns are
1532: simply ignored. This allows easily inserting element stiffness matrices
1533: with homogeneous Dirichlet boundary conditions that you don't want represented
1534: in the matrix.
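Example:
A minimal sketch, assuming `A` is a preallocated matrix; the index sets and values are purely illustrative.
.vb
  PetscInt    rows[2] = {0, 1}, cols[2] = {2, 3};
  PetscScalar vals[4] = {1.0, 2.0, 3.0, 4.0};
  IS          ism, isn;

  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, rows, PETSC_COPY_VALUES, &ism));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, 2, cols, PETSC_COPY_VALUES, &isn));
  PetscCall(MatSetValuesIS(A, ism, isn, vals, ADD_VALUES));
  PetscCall(ISDestroy(&ism));
  PetscCall(ISDestroy(&isn));
.ve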
1536: Efficiency Alert:
1537: The routine `MatSetValuesBlocked()` may offer much better efficiency
1538: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1540: This is currently not optimized for any particular `ISType`.
1542: Developer Note:
1543: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
1544: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1546: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatSetValues()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1547: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1548: @*/
1549: PetscErrorCode MatSetValuesIS(Mat mat, IS ism, IS isn, const PetscScalar v[], InsertMode addv)
1550: {
1551: PetscInt m, n;
1552: const PetscInt *rows, *cols;
1554: PetscFunctionBeginHot;
1556: PetscCall(ISGetIndices(ism, &rows));
1557: PetscCall(ISGetIndices(isn, &cols));
1558: PetscCall(ISGetLocalSize(ism, &m));
1559: PetscCall(ISGetLocalSize(isn, &n));
1560: PetscCall(MatSetValues(mat, m, rows, n, cols, v, addv));
1561: PetscCall(ISRestoreIndices(ism, &rows));
1562: PetscCall(ISRestoreIndices(isn, &cols));
1563: PetscFunctionReturn(PETSC_SUCCESS);
1564: }
1566: /*@
1567: MatSetValuesRowLocal - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1568: values into a matrix
1570: Not Collective
1572: Input Parameters:
1573: + mat - the matrix
1574: . row - the (block) row to set
1575: - v - a logically two-dimensional array of values
1577: Level: intermediate
1579: Notes:
1580: The values, `v`, are column-oriented (for the block version) and sorted
1582: All the nonzero values in `row` must be provided
1584: The matrix must have previously had its column indices set, likely by having been assembled.
1586: `row` must belong to this MPI process
1588: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1589: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1590: @*/
1591: PetscErrorCode MatSetValuesRowLocal(Mat mat, PetscInt row, const PetscScalar v[])
1592: {
1593: PetscInt globalrow;
1595: PetscFunctionBegin;
1598: PetscAssertPointer(v, 3);
1599: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, 1, &row, &globalrow));
1600: PetscCall(MatSetValuesRow(mat, globalrow, v));
1601: PetscFunctionReturn(PETSC_SUCCESS);
1602: }
1604: /*@
1605: MatSetValuesRow - Inserts a row (block row for `MATBAIJ` matrices) of nonzero
1606: values into a matrix
1608: Not Collective
1610: Input Parameters:
1611: + mat - the matrix
1612: . row - the (block) row to set
1613: - v - a logically two-dimensional (column major) array of values for block matrices with blocksize larger than one, otherwise a one dimensional array of values
1615: Level: advanced
1617: Notes:
1618: The values, `v`, are column-oriented for the block version.
1620: All the nonzeros in `row` must be provided
1622: THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. It is rare that this routine is used; usually `MatSetValues()` is used instead.
1624: `row` must belong to this process
1626: .seealso: [](ch_matrices), `Mat`, `MatSetValues()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1627: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1628: @*/
1629: PetscErrorCode MatSetValuesRow(Mat mat, PetscInt row, const PetscScalar v[])
1630: {
1631: PetscFunctionBeginHot;
1634: MatCheckPreallocated(mat, 1);
1635: PetscAssertPointer(v, 3);
1636: PetscCheck(mat->insertmode != ADD_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add and insert values");
1637: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
1638: mat->insertmode = INSERT_VALUES;
1640: if (mat->assembled) {
1641: mat->was_assembled = PETSC_TRUE;
1642: mat->assembled = PETSC_FALSE;
1643: }
1644: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
1645: PetscUseTypeMethod(mat, setvaluesrow, row, v);
1646: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
1647: PetscFunctionReturn(PETSC_SUCCESS);
1648: }
1650: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
1651: /*@
1652: MatSetValuesStencil - Inserts or adds a block of values into a matrix,
1653: using structured grid indexing
1655: Not Collective
1657: Input Parameters:
1658: + mat - the matrix
1659: . m - number of rows being entered
1660: . idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1661: . n - number of columns being entered
1662: . idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1663: . v - a logically two-dimensional array of values
1664: - addv - either `ADD_VALUES` to add to existing entries at that location or `INSERT_VALUES` to replace existing entries with new values
1666: Level: beginner
1668: Notes:
1669: By default the values, `v`, are row-oriented. See `MatSetOption()` for other options.
1671: Calls to `MatSetValuesStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1672: options cannot be mixed without intervening calls to the assembly
1673: routines.
1675: The grid coordinates are across the entire grid, not just the local portion
1677: `MatSetValuesStencil()` uses 0-based row and column numbers in Fortran
1678: as well as in C.
1680: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1682: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1683: or call `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1685: The columns and rows in the stencil passed in MUST be contained within the
1686: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1687: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1688: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1689: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1691: For periodic boundary conditions use negative indices for values to the left of index 0 (these are to be
1692: obtained by wrapping values from the right edge). For values to the right of the last entry, use that last index plus one,
1693: plus two, etc.; these are obtained by wrapping values from the left edge. This does not work for anything but the
1694: `DM_BOUNDARY_PERIODIC` boundary type.
1696: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
1697: a single value per point) you can skip filling those indices.
1699: Inspired by the structured grid interface to the HYPRE package
1700: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
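Example:
A minimal sketch that inserts a standard 2d 5-point stencil row, assuming `A` was obtained from `DMCreateMatrix()` on a 2d `DMDA` with one degree of freedom; the grid indices and values are purely illustrative.
.vb
  MatStencil  row, cols[5];
  PetscScalar vals[5] = {4.0, -1.0, -1.0, -1.0, -1.0};
  PetscInt    i = 3, j = 4; /* global grid indices of this row's grid point, normally loop variables */

  row.i = i;         row.j = j;
  cols[0].i = i;     cols[0].j = j;
  cols[1].i = i - 1; cols[1].j = j;
  cols[2].i = i + 1; cols[2].j = j;
  cols[3].i = i;     cols[3].j = j - 1;
  cols[4].i = i;     cols[4].j = j + 1;
  PetscCall(MatSetValuesStencil(A, 1, &row, 5, cols, vals, INSERT_VALUES));
.ve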
1702: Efficiency Alert:
1703: The routine `MatSetValuesBlockedStencil()` may offer much better efficiency
1704: for users of block sparse formats (`MATSEQBAIJ` and `MATMPIBAIJ`).
1706: Fortran Note:
1707: `idxm` and `idxn` should be declared as
1708: $ MatStencil idxm(4,m),idxn(4,n)
1709: and the values inserted using
1710: .vb
1711: idxm(MatStencil_i,1) = i
1712: idxm(MatStencil_j,1) = j
1713: idxm(MatStencil_k,1) = k
1714: idxm(MatStencil_c,1) = c
1715: etc
1716: .ve
1718: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1719: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1720: @*/
1721: PetscErrorCode MatSetValuesStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1722: {
1723: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1724: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1725: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1727: PetscFunctionBegin;
1728: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1731: PetscAssertPointer(idxm, 3);
1732: PetscAssertPointer(idxn, 5);
1734: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1735: jdxm = buf;
1736: jdxn = buf + m;
1737: } else {
1738: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1739: jdxm = bufm;
1740: jdxn = bufn;
1741: }
1742: for (i = 0; i < m; i++) {
1743: for (j = 0; j < 3 - sdim; j++) dxm++;
1744: tmp = *dxm++ - starts[0];
1745: for (j = 0; j < dim - 1; j++) {
1746: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1747: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1748: }
1749: if (mat->stencil.noc) dxm++;
1750: jdxm[i] = tmp;
1751: }
1752: for (i = 0; i < n; i++) {
1753: for (j = 0; j < 3 - sdim; j++) dxn++;
1754: tmp = *dxn++ - starts[0];
1755: for (j = 0; j < dim - 1; j++) {
1756: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1757: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1758: }
1759: if (mat->stencil.noc) dxn++;
1760: jdxn[i] = tmp;
1761: }
1762: PetscCall(MatSetValuesLocal(mat, m, jdxm, n, jdxn, v, addv));
1763: PetscCall(PetscFree2(bufm, bufn));
1764: PetscFunctionReturn(PETSC_SUCCESS);
1765: }
1767: /*@
1768: MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix,
1769: using structured grid indexing
1771: Not Collective
1773: Input Parameters:
1774: + mat - the matrix
1775: . m - number of rows being entered
1776: . idxm - grid coordinates for matrix rows being entered
1777: . n - number of columns being entered
1778: . idxn - grid coordinates for matrix columns being entered
1779: . v - a logically two-dimensional array of values
1780: - addv - either `ADD_VALUES` to add to existing entries or `INSERT_VALUES` to replace existing entries with new values
1782: Level: beginner
1784: Notes:
1785: By default the values, `v`, are row-oriented and unsorted.
1786: See `MatSetOption()` for other options.
1788: Calls to `MatSetValuesBlockedStencil()` with the `INSERT_VALUES` and `ADD_VALUES`
1789: options cannot be mixed without intervening calls to the assembly
1790: routines.
1792: The grid coordinates are across the entire grid, not just the local portion
1794: `MatSetValuesBlockedStencil()` uses 0-based row and column numbers in Fortran
1795: as well as in C.
1797: For setting/accessing vector values via array coordinates you can use the `DMDAVecGetArray()` routine
1799: In order to use this routine you must either obtain the matrix with `DMCreateMatrix()`
1800: or call `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()` and `MatSetStencil()` first.
1802: The columns and rows in the stencil passed in MUST be contained within the
1803: ghost region of the given process as set with DMDACreateXXX() or `MatSetStencil()`. For example,
1804: if you create a `DMDA` with an overlap of one grid level and on a particular process its first
1805: local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1806: first i index you can use in your column and row indices in `MatSetStencil()` is 5.
1808: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1809: simply ignored. This allows easily inserting element stiffness matrices
1810: with homogeneous Dirichlet boundary conditions that you don't want represented
1811: in the matrix.
1813: Inspired by the structured grid interface to the HYPRE package
1814: (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1816: Fortran Note:
1817: `idxm` and `idxn` should be declared as
1818: $ MatStencil idxm(4,m),idxn(4,n)
1819: and the values inserted using
1820: .vb
1821: idxm(MatStencil_i,1) = i
1822: idxm(MatStencil_j,1) = j
1823: idxm(MatStencil_k,1) = k
1824: etc
1825: .ve
1827: .seealso: [](ch_matrices), `Mat`, `DMDA`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1828: `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1829: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1830: @*/
1831: PetscErrorCode MatSetValuesBlockedStencil(Mat mat, PetscInt m, const MatStencil idxm[], PetscInt n, const MatStencil idxn[], const PetscScalar v[], InsertMode addv)
1832: {
1833: PetscInt buf[8192], *bufm = NULL, *bufn = NULL, *jdxm, *jdxn;
1834: PetscInt j, i, dim = mat->stencil.dim, *dims = mat->stencil.dims + 1, tmp;
1835: PetscInt *starts = mat->stencil.starts, *dxm = (PetscInt *)idxm, *dxn = (PetscInt *)idxn, sdim = dim - (1 - (PetscInt)mat->stencil.noc);
1837: PetscFunctionBegin;
1838: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1841: PetscAssertPointer(idxm, 3);
1842: PetscAssertPointer(idxn, 5);
1843: PetscAssertPointer(v, 6);
1845: if ((m + n) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
1846: jdxm = buf;
1847: jdxn = buf + m;
1848: } else {
1849: PetscCall(PetscMalloc2(m, &bufm, n, &bufn));
1850: jdxm = bufm;
1851: jdxn = bufn;
1852: }
1853: for (i = 0; i < m; i++) {
1854: for (j = 0; j < 3 - sdim; j++) dxm++;
1855: tmp = *dxm++ - starts[0];
1856: for (j = 0; j < sdim - 1; j++) {
1857: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1858: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
1859: }
1860: dxm++;
1861: jdxm[i] = tmp;
1862: }
1863: for (i = 0; i < n; i++) {
1864: for (j = 0; j < 3 - sdim; j++) dxn++;
1865: tmp = *dxn++ - starts[0];
1866: for (j = 0; j < sdim - 1; j++) {
1867: if ((*dxn++ - starts[j + 1]) < 0 || tmp < 0) tmp = -1;
1868: else tmp = tmp * dims[j] + *(dxn - 1) - starts[j + 1];
1869: }
1870: dxn++;
1871: jdxn[i] = tmp;
1872: }
1873: PetscCall(MatSetValuesBlockedLocal(mat, m, jdxm, n, jdxn, v, addv));
1874: PetscCall(PetscFree2(bufm, bufn));
1875: PetscFunctionReturn(PETSC_SUCCESS);
1876: }
1878: /*@
1879: MatSetStencil - Sets the grid information for setting values into a matrix via
1880: `MatSetValuesStencil()`
1882: Not Collective
1884: Input Parameters:
1885: + mat - the matrix
1886: . dim - dimension of the grid (1, 2, or 3)
1887: . dims - number of grid points in x, y, and z direction, including ghost points on your processor
1888: . starts - starting point of ghost nodes on your processor in x, y, and z direction
1889: - dof - number of degrees of freedom per node
1891: Level: beginner
1893: Notes:
1894: Inspired by the structured grid interface to the HYPRE package
1895: (www.llnl.gov/CASC/hyper)
1897: For matrices generated with `DMCreateMatrix()` this routine is automatically called and so not needed by the
1898: user.
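Example:
A minimal sketch of setting the stencil information by hand, assuming the ghosted local grid on this process is 10 x 12 points whose ghost corner starts at global indices (2,3), with one degree of freedom; all numbers are purely illustrative.
.vb
  PetscInt dims[2]   = {10, 12}; /* ghosted grid points in the x and y directions on this process */
  PetscInt starts[2] = {2, 3};   /* global index of the first ghost point in the x and y directions */

  PetscCall(MatSetStencil(A, 2, dims, starts, 1));
.ve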
1900: .seealso: [](ch_matrices), `Mat`, `MatStencil`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1901: `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1902: @*/
1903: PetscErrorCode MatSetStencil(Mat mat, PetscInt dim, const PetscInt dims[], const PetscInt starts[], PetscInt dof)
1904: {
1905: PetscFunctionBegin;
1907: PetscAssertPointer(dims, 3);
1908: PetscAssertPointer(starts, 4);
1910: mat->stencil.dim = dim + (dof > 1);
1911: for (PetscInt i = 0; i < dim; i++) {
1912: mat->stencil.dims[i] = dims[dim - i - 1]; /* copy the values in backwards */
1913: mat->stencil.starts[i] = starts[dim - i - 1];
1914: }
1915: mat->stencil.dims[dim] = dof;
1916: mat->stencil.starts[dim] = 0;
1917: mat->stencil.noc = (PetscBool)(dof == 1);
1918: PetscFunctionReturn(PETSC_SUCCESS);
1919: }
1921: /*@C
1922: MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1924: Not Collective
1926: Input Parameters:
1927: + mat - the matrix
1928: . v - a logically two-dimensional array of values
1929: . m - the number of block rows
1930: . idxm - the global block indices
1931: . n - the number of block columns
1932: . idxn - the global block indices
1933: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` replaces existing entries with new values
1935: Level: intermediate
1937: Notes:
1938: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call
1939: MatXXXXSetPreallocation() or `MatSetUp()` before using this routine.
1941: The `m` and `n` count the NUMBER of blocks in the row direction and column direction,
1942: NOT the total number of rows/columns; for example, if the block size is 2 and
1943: you are passing in values for rows 2,3,4,5 then m would be 2 (not 4).
1944: The values in `idxm` would be 1 2; that is, the first row index of each block divided by
1945: the block size.
1947: You must call `MatSetBlockSize()` when constructing this matrix (before
1948: preallocating it).
1950: By default the values, `v`, are row-oriented, so the layout of
1951: `v` is the same as for `MatSetValues()`. See `MatSetOption()` for other options.
1953: Calls to `MatSetValuesBlocked()` with the `INSERT_VALUES` and `ADD_VALUES`
1954: options cannot be mixed without intervening calls to the assembly
1955: routines.
1957: `MatSetValuesBlocked()` uses 0-based row and column numbers in Fortran
1958: as well as in C.
1960: Negative indices may be passed in `idxm` and `idxn`; these rows and columns are
1961: simply ignored. This allows easily inserting element stiffness matrices
1962: with homogeneous Dirichlet boundary conditions that you don't want represented
1963: in the matrix.
1965: Each time an entry is set within a sparse matrix via `MatSetValues()`,
1966: internal searching must be done to determine where to place the
1967: data in the matrix storage space. By instead inserting blocks of
1968: entries via `MatSetValuesBlocked()`, the overhead of matrix assembly is
1969: reduced.
1971: Example:
1972: .vb
1973: Suppose m=n=2 and block size (bs) = 2. The array is
1975: 1 2 | 3 4
1976: 5 6 | 7 8
1977: - - - | - - -
1978: 9 10 | 11 12
1979: 13 14 | 15 16
1981: v[] should be passed in like
1982: v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1984: If you are not using row oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1985: v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1986: .ve
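A sketch of the corresponding call, assuming `A` was created with block size 2 and rows/columns 2,3,4,5 may be set by this process:
.vb
  PetscInt    idxm[2] = {1, 2}, idxn[2] = {1, 2}; /* block indices addressing global rows/columns 2,3 and 4,5 */
  PetscScalar v[16]   = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};

  PetscCall(MatSetValuesBlocked(A, 2, idxm, 2, idxn, v, INSERT_VALUES));
.ve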
1988: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
1989: @*/
1990: PetscErrorCode MatSetValuesBlocked(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], const PetscScalar v[], InsertMode addv)
1991: {
1992: PetscFunctionBeginHot;
1995: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
1996: PetscAssertPointer(idxm, 3);
1997: PetscAssertPointer(idxn, 5);
1998: MatCheckPreallocated(mat, 1);
1999: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2000: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2001: if (PetscDefined(USE_DEBUG)) {
2002: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2003: PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2004: }
2005: if (PetscDefined(USE_DEBUG)) {
2006: PetscInt rbs, cbs, M, N, i;
2007: PetscCall(MatGetBlockSizes(mat, &rbs, &cbs));
2008: PetscCall(MatGetSize(mat, &M, &N));
2009: for (i = 0; i < m; i++) PetscCheck(idxm[i] * rbs < M, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Row block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than row length %" PetscInt_FMT, i, idxm[i], M);
2010: for (i = 0; i < n; i++) PetscCheck(idxn[i] * cbs < N, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Column block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than column length %" PetscInt_FMT, i, idxn[i], N);
2011: }
2012: if (mat->assembled) {
2013: mat->was_assembled = PETSC_TRUE;
2014: mat->assembled = PETSC_FALSE;
2015: }
2016: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2017: if (mat->ops->setvaluesblocked) {
2018: PetscUseTypeMethod(mat, setvaluesblocked, m, idxm, n, idxn, v, addv);
2019: } else {
2020: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *iidxm, *iidxn;
2021: PetscInt i, j, bs, cbs;
2023: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
2024: if ((m * bs + n * cbs) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2025: iidxm = buf;
2026: iidxn = buf + m * bs;
2027: } else {
2028: PetscCall(PetscMalloc2(m * bs, &bufr, n * cbs, &bufc));
2029: iidxm = bufr;
2030: iidxn = bufc;
2031: }
2032: for (i = 0; i < m; i++) {
2033: for (j = 0; j < bs; j++) iidxm[i * bs + j] = bs * idxm[i] + j;
2034: }
2035: if (m != n || bs != cbs || idxm != idxn) {
2036: for (i = 0; i < n; i++) {
2037: for (j = 0; j < cbs; j++) iidxn[i * cbs + j] = cbs * idxn[i] + j;
2038: }
2039: } else iidxn = iidxm;
2040: PetscCall(MatSetValues(mat, m * bs, iidxm, n * cbs, iidxn, v, addv));
2041: PetscCall(PetscFree2(bufr, bufc));
2042: }
2043: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2044: PetscFunctionReturn(PETSC_SUCCESS);
2045: }
2047: /*@C
2048: MatGetValues - Gets a block of local values from a matrix.
2050: Not Collective; can only return values that are owned by the given process
2052: Input Parameters:
2053: + mat - the matrix
2054: . v - a logically two-dimensional array for storing the values
2055: . m - the number of rows
2056: . idxm - the global indices of the rows
2057: . n - the number of columns
2058: - idxn - the global indices of the columns
2060: Level: advanced
2062: Notes:
2063: The user must allocate space (m*n `PetscScalar`s) for the values, `v`.
2064: The values, `v`, are then returned in a row-oriented format,
2065: analogous to that used by default in `MatSetValues()`.
2067: `MatGetValues()` uses 0-based row and column numbers in
2068: Fortran as well as in C.
2070: `MatGetValues()` requires that the matrix has been assembled
2071: with `MatAssemblyBegin()`/`MatAssemblyEnd()`. Thus, calls to
2072: `MatSetValues()` and `MatGetValues()` CANNOT be made in succession
2073: without intermediate matrix assembly.
2075: Negative row or column indices will be ignored and those locations in `v` will be
2076: left unchanged.
2078: For the standard row-based matrix formats, `idxm` can only contain rows owned by the requesting MPI process.
2079: That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
2080: from `MatGetOwnershipRange`(mat,&rstart,&rend).
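Example:
A minimal sketch that retrieves a 2x2 block from locally owned rows, assuming this process owns at least two rows of an assembled matrix `A` that has at least two columns; the indices are purely illustrative.
.vb
  PetscInt    rstart, rend, rows[2], cols[2] = {0, 1};
  PetscScalar vals[4];

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  rows[0] = rstart;
  rows[1] = rstart + 1;
  PetscCall(MatGetValues(A, 2, rows, 2, cols, vals)); /* vals[i*2 + j] holds A(rows[i], cols[j]) */
.ve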
2082: .seealso: [](ch_matrices), `Mat`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
2083: @*/
2084: PetscErrorCode MatGetValues(Mat mat, PetscInt m, const PetscInt idxm[], PetscInt n, const PetscInt idxn[], PetscScalar v[])
2085: {
2086: PetscFunctionBegin;
2089: if (!m || !n) PetscFunctionReturn(PETSC_SUCCESS);
2090: PetscAssertPointer(idxm, 3);
2091: PetscAssertPointer(idxn, 5);
2092: PetscAssertPointer(v, 6);
2093: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2094: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2095: MatCheckPreallocated(mat, 1);
2097: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2098: PetscUseTypeMethod(mat, getvalues, m, idxm, n, idxn, v);
2099: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2100: PetscFunctionReturn(PETSC_SUCCESS);
2101: }
2103: /*@C
2104: MatGetValuesLocal - Retrieves values from certain locations in a matrix using the local numbering of the indices
2105: defined previously by `MatSetLocalToGlobalMapping()`
2107: Not Collective
2109: Input Parameters:
2110: + mat - the matrix
2111: . nrow - number of rows
2112: . irow - the row local indices
2113: . ncol - number of columns
2114: - icol - the column local indices
2116: Output Parameter:
2117: . y - a logically two-dimensional array of values
2119: Level: advanced
2121: Notes:
2122: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine.
2124: This routine can only return values that are owned by the requesting MPI process. That is, for standard matrix formats, rows that, in the global numbering,
2125: are greater than or equal to rstart and less than rend where rstart and rend are obtainable from `MatGetOwnershipRange`(mat,&rstart,&rend). One can
2126: determine if the resulting global row associated with the local row r is owned by the requesting MPI process by applying the `ISLocalToGlobalMapping` set
2127: with `MatSetLocalToGlobalMapping()`.
2129: Developer Note:
2130: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2131: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2133: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2134: `MatSetValuesLocal()`, `MatGetValues()`
2135: @*/
2136: PetscErrorCode MatGetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], PetscScalar y[])
2137: {
2138: PetscFunctionBeginHot;
2141: MatCheckPreallocated(mat, 1);
2142: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to retrieve */
2143: PetscAssertPointer(irow, 3);
2144: PetscAssertPointer(icol, 5);
2145: if (PetscDefined(USE_DEBUG)) {
2146: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2147: PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2148: }
2149: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2150: PetscCall(PetscLogEventBegin(MAT_GetValues, mat, 0, 0, 0));
2151: if (mat->ops->getvalueslocal) PetscUseTypeMethod(mat, getvalueslocal, nrow, irow, ncol, icol, y);
2152: else {
2153: PetscInt buf[8192], *bufr = NULL, *bufc = NULL, *irowm, *icolm;
2154: if ((nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2155: irowm = buf;
2156: icolm = buf + nrow;
2157: } else {
2158: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2159: irowm = bufr;
2160: icolm = bufc;
2161: }
2162: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2163: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2164: PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, irowm));
2165: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, icolm));
2166: PetscCall(MatGetValues(mat, nrow, irowm, ncol, icolm, y));
2167: PetscCall(PetscFree2(bufr, bufc));
2168: }
2169: PetscCall(PetscLogEventEnd(MAT_GetValues, mat, 0, 0, 0));
2170: PetscFunctionReturn(PETSC_SUCCESS);
2171: }
2173: /*@
2174: MatSetValuesBatch - Adds (`ADD_VALUES`) many blocks of values into a matrix at once. The blocks must all be square and
2175: the same size. Currently, this can only be called once and creates the given matrix.
2177: Not Collective
2179: Input Parameters:
2180: + mat - the matrix
2181: . nb - the number of blocks
2182: . bs - the number of rows (and columns) in each block
2183: . rows - a concatenation of the rows for each block
2184: - v - a concatenation of logically two-dimensional arrays of values
2186: Level: advanced
2188: Notes:
2189: `MatSetPreallocationCOO()` and `MatSetValuesCOO()` may be a better way to provide the values
2191: In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
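Example:
A minimal sketch adding two square 2x2 blocks, assuming `A` has at least 4 rows and columns; the rows and values are purely illustrative.
.vb
  PetscInt    rows[4] = {0, 1, 2, 3};  /* rows of block 0 followed by rows of block 1 */
  PetscScalar v[8]    = {1, 2, 3, 4,   /* 2x2 values of block 0, row-oriented */
                         5, 6, 7, 8};  /* 2x2 values of block 1, row-oriented */

  PetscCall(MatSetValuesBatch(A, 2, 2, rows, v));
.ve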
2193: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2194: `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetPreallocationCOO()`, `MatSetValuesCOO()`
2195: @*/
2196: PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2197: {
2198: PetscFunctionBegin;
2201: PetscAssertPointer(rows, 4);
2202: PetscAssertPointer(v, 5);
2203: PetscAssert(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2205: PetscCall(PetscLogEventBegin(MAT_SetValuesBatch, mat, 0, 0, 0));
2206: if (mat->ops->setvaluesbatch) PetscUseTypeMethod(mat, setvaluesbatch, nb, bs, rows, v);
2207: else {
2208: for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b * bs], bs, &rows[b * bs], &v[b * bs * bs], ADD_VALUES));
2209: }
2210: PetscCall(PetscLogEventEnd(MAT_SetValuesBatch, mat, 0, 0, 0));
2211: PetscFunctionReturn(PETSC_SUCCESS);
2212: }
2214: /*@
2215: MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2216: the routine `MatSetValuesLocal()` to allow users to insert matrix entries
2217: using a local (per-processor) numbering.
2219: Not Collective
2221: Input Parameters:
2222: + x - the matrix
2223: . rmapping - row mapping created with `ISLocalToGlobalMappingCreate()` or `ISLocalToGlobalMappingCreateIS()`
2224: - cmapping - column mapping
2226: Level: intermediate
2228: Note:
2229: If the matrix is obtained with `DMCreateMatrix()` then this may already have been called on the matrix
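Example:
A minimal sketch that maps four local indices to chosen global indices and attaches the mapping to both rows and columns of `A`; the global indices are purely illustrative.
.vb
  ISLocalToGlobalMapping ltog;
  PetscInt               globals[4] = {10, 11, 20, 21}; /* global index of each local index 0,1,2,3 */

  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, 4, globals, PETSC_COPY_VALUES, &ltog));
  PetscCall(MatSetLocalToGlobalMapping(A, ltog, ltog));
  PetscCall(ISLocalToGlobalMappingDestroy(&ltog));
.ve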
2231: .seealso: [](ch_matrices), `Mat`, `DM`, `DMCreateMatrix()`, `MatGetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2232: @*/
2233: PetscErrorCode MatSetLocalToGlobalMapping(Mat x, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
2234: {
2235: PetscFunctionBegin;
2240: if (x->ops->setlocaltoglobalmapping) PetscUseTypeMethod(x, setlocaltoglobalmapping, rmapping, cmapping);
2241: else {
2242: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap, rmapping));
2243: PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap, cmapping));
2244: }
2245: PetscFunctionReturn(PETSC_SUCCESS);
2246: }
2248: /*@
2249: MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by `MatSetLocalToGlobalMapping()`
2251: Not Collective
2253: Input Parameter:
2254: . A - the matrix
2256: Output Parameters:
2257: + rmapping - row mapping
2258: - cmapping - column mapping
2260: Level: advanced
2262: .seealso: [](ch_matrices), `Mat`, `MatSetLocalToGlobalMapping()`, `MatSetValuesLocal()`
2263: @*/
2264: PetscErrorCode MatGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
2265: {
2266: PetscFunctionBegin;
2269: if (rmapping) {
2270: PetscAssertPointer(rmapping, 2);
2271: *rmapping = A->rmap->mapping;
2272: }
2273: if (cmapping) {
2274: PetscAssertPointer(cmapping, 3);
2275: *cmapping = A->cmap->mapping;
2276: }
2277: PetscFunctionReturn(PETSC_SUCCESS);
2278: }
2280: /*@
2281: MatSetLayouts - Sets the `PetscLayout` objects for rows and columns of a matrix
2283: Logically Collective
2285: Input Parameters:
2286: + A - the matrix
2287: . rmap - row layout
2288: - cmap - column layout
2290: Level: advanced
2292: Note:
2293: The `PetscLayout` objects are usually created automatically for the matrix so this routine rarely needs to be called.
2295: .seealso: [](ch_matrices), `Mat`, `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2296: @*/
2297: PetscErrorCode MatSetLayouts(Mat A, PetscLayout rmap, PetscLayout cmap)
2298: {
2299: PetscFunctionBegin;
2301: PetscCall(PetscLayoutReference(rmap, &A->rmap));
2302: PetscCall(PetscLayoutReference(cmap, &A->cmap));
2303: PetscFunctionReturn(PETSC_SUCCESS);
2304: }
2306: /*@
2307: MatGetLayouts - Gets the `PetscLayout` objects for rows and columns
2309: Not Collective
2311: Input Parameter:
2312: . A - the matrix
2314: Output Parameters:
2315: + rmap - row layout
2316: - cmap - column layout
2318: Level: advanced
2320: .seealso: [](ch_matrices), `Mat`, [Matrix Layouts](sec_matlayout), `PetscLayout`, `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2321: @*/
2322: PetscErrorCode MatGetLayouts(Mat A, PetscLayout *rmap, PetscLayout *cmap)
2323: {
2324: PetscFunctionBegin;
2327: if (rmap) {
2328: PetscAssertPointer(rmap, 2);
2329: *rmap = A->rmap;
2330: }
2331: if (cmap) {
2332: PetscAssertPointer(cmap, 3);
2333: *cmap = A->cmap;
2334: }
2335: PetscFunctionReturn(PETSC_SUCCESS);
2336: }
2338: /*@C
2339: MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2340: using a local numbering of the rows and columns.
2342: Not Collective
2344: Input Parameters:
2345: + mat - the matrix
2346: . nrow - number of rows
2347: . irow - the row local indices
2348: . ncol - number of columns
2349: . icol - the column local indices
2350: . y - a logically two-dimensional array of values
2351: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2353: Level: intermediate
2355: Notes:
2356: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetLocalToGlobalMapping()` before using this routine
2358: Calls to `MatSetValuesLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2359: options cannot be mixed without intervening calls to the assembly
2360: routines.
2362: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2363: MUST be called after all calls to `MatSetValuesLocal()` have been completed.
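Example:
A minimal sketch that adds a single entry using local indices, assuming a local-to-global mapping has already been attached to `A` with `MatSetLocalToGlobalMapping()`; the indices and value are purely illustrative.
.vb
  PetscInt    lrow = 0, lcol = 0;
  PetscScalar value = 1.0;

  PetscCall(MatSetValuesLocal(A, 1, &lrow, 1, &lcol, &value, ADD_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve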
2365: Developer Note:
2366: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2367: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2369: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2370: `MatGetValuesLocal()`
2371: @*/
2372: PetscErrorCode MatSetValuesLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2373: {
2374: PetscFunctionBeginHot;
2377: MatCheckPreallocated(mat, 1);
2378: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2379: PetscAssertPointer(irow, 3);
2380: PetscAssertPointer(icol, 5);
2381: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2382: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2383: if (PetscDefined(USE_DEBUG)) {
2384: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2385: PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2386: }
2388: if (mat->assembled) {
2389: mat->was_assembled = PETSC_TRUE;
2390: mat->assembled = PETSC_FALSE;
2391: }
2392: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2393: if (mat->ops->setvalueslocal) PetscUseTypeMethod(mat, setvalueslocal, nrow, irow, ncol, icol, y, addv);
2394: else {
2395: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2396: const PetscInt *irowm, *icolm;
2398: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= (PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf)) {
2399: bufr = buf;
2400: bufc = buf + nrow;
2401: irowm = bufr;
2402: icolm = bufc;
2403: } else {
2404: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2405: irowm = bufr;
2406: icolm = bufc;
2407: }
2408: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping, nrow, irow, bufr));
2409: else irowm = irow;
2410: if (mat->cmap->mapping) {
2411: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2412: PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping, ncol, icol, bufc));
2413: } else icolm = irowm;
2414: } else icolm = icol;
2415: PetscCall(MatSetValues(mat, nrow, irowm, ncol, icolm, y, addv));
2416: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2417: }
2418: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2419: PetscFunctionReturn(PETSC_SUCCESS);
2420: }
2422: /*@C
2423: MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
2424: using a local ordering of the nodes a block at a time.
2426: Not Collective
2428: Input Parameters:
2429: + mat - the matrix
2430: . nrow - number of rows
2431: . irow - the row local indices
2432: . ncol - number of columns
2433: . icol - the column local indices
2434: . y - a logically two-dimensional array of values
2435: - addv - either `ADD_VALUES` to add values to any existing entries, or `INSERT_VALUES` to replace existing entries with new values
2437: Level: intermediate
2439: Notes:
2440: If you create the matrix yourself (that is not with a call to `DMCreateMatrix()`) then you MUST call `MatSetBlockSize()` and `MatSetLocalToGlobalMapping()`
2441: before using this routine.
2443: Calls to `MatSetValuesBlockedLocal()` with the `INSERT_VALUES` and `ADD_VALUES`
2444: options cannot be mixed without intervening calls to the assembly
2445: routines.
2447: These values may be cached, so `MatAssemblyBegin()` and `MatAssemblyEnd()`
2448: MUST be called after all calls to `MatSetValuesBlockedLocal()` have been completed.
2450: Developer Note:
2451: This is labeled with C so it does not automatically generate Fortran stubs and interfaces
2452: because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2454: .seealso: [](ch_matrices), `Mat`, `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
2455: `MatSetValuesLocal()`, `MatSetValuesBlocked()`
2456: @*/
2457: PetscErrorCode MatSetValuesBlockedLocal(Mat mat, PetscInt nrow, const PetscInt irow[], PetscInt ncol, const PetscInt icol[], const PetscScalar y[], InsertMode addv)
2458: {
2459: PetscFunctionBeginHot;
2462: MatCheckPreallocated(mat, 1);
2463: if (!nrow || !ncol) PetscFunctionReturn(PETSC_SUCCESS); /* no values to insert */
2464: PetscAssertPointer(irow, 3);
2465: PetscAssertPointer(icol, 5);
2466: if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
2467: else PetscCheck(mat->insertmode == addv, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Cannot mix add values and insert values");
2468: if (PetscDefined(USE_DEBUG)) {
2469: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2470: PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues, PETSC_COMM_SELF, PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2471: }
2473: if (mat->assembled) {
2474: mat->was_assembled = PETSC_TRUE;
2475: mat->assembled = PETSC_FALSE;
2476: }
2477: if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
2478: PetscInt irbs, rbs;
2479: PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
2480: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &irbs));
2481: PetscCheck(rbs == irbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT, rbs, irbs);
2482: }
2483: if (PetscUnlikelyDebug(mat->cmap->mapping)) {
2484: PetscInt icbs, cbs;
2485: PetscCall(MatGetBlockSizes(mat, NULL, &cbs));
2486: PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &icbs));
2487: PetscCheck(cbs == icbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT, cbs, icbs);
2488: }
2489: PetscCall(PetscLogEventBegin(MAT_SetValues, mat, 0, 0, 0));
2490: if (mat->ops->setvaluesblockedlocal) PetscUseTypeMethod(mat, setvaluesblockedlocal, nrow, irow, ncol, icol, y, addv);
2491: else {
2492: PetscInt buf[8192], *bufr = NULL, *bufc = NULL;
2493: const PetscInt *irowm, *icolm;
2495: if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow + ncol) <= ((PetscInt)PETSC_STATIC_ARRAY_LENGTH(buf))) {
2496: bufr = buf;
2497: bufc = buf + nrow;
2498: irowm = bufr;
2499: icolm = bufc;
2500: } else {
2501: PetscCall(PetscMalloc2(nrow, &bufr, ncol, &bufc));
2502: irowm = bufr;
2503: icolm = bufc;
2504: }
2505: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping, nrow, irow, bufr));
2506: else irowm = irow;
2507: if (mat->cmap->mapping) {
2508: if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
2509: PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping, ncol, icol, bufc));
2510: } else icolm = irowm;
2511: } else icolm = icol;
2512: PetscCall(MatSetValuesBlocked(mat, nrow, irowm, ncol, icolm, y, addv));
2513: if (bufr != buf) PetscCall(PetscFree2(bufr, bufc));
2514: }
2515: PetscCall(PetscLogEventEnd(MAT_SetValues, mat, 0, 0, 0));
2516: PetscFunctionReturn(PETSC_SUCCESS);
2517: }
2519: /*@
2520: MatMultDiagonalBlock - Computes the matrix-vector product, $y = Dx$, where `D` is defined by the inode or block structure of the diagonal
2522: Collective
2524: Input Parameters:
2525: + mat - the matrix
2526: - x - the vector to be multiplied
2528: Output Parameter:
2529: . y - the result
2531: Level: developer
2533: Note:
2534: The vectors `x` and `y` cannot be the same. I.e., one cannot
2535: call `MatMultDiagonalBlock`(A,y,y).
2537: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2538: @*/
2539: PetscErrorCode MatMultDiagonalBlock(Mat mat, Vec x, Vec y)
2540: {
2541: PetscFunctionBegin;
2547: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2548: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2549: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2550: MatCheckPreallocated(mat, 1);
2552: PetscUseTypeMethod(mat, multdiagonalblock, x, y);
2553: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2554: PetscFunctionReturn(PETSC_SUCCESS);
2555: }
2557: /*@
2558: MatMult - Computes the matrix-vector product, $y = Ax$.
2560: Neighbor-wise Collective
2562: Input Parameters:
2563: + mat - the matrix
2564: - x - the vector to be multiplied
2566: Output Parameter:
2567: . y - the result
2569: Level: beginner
2571: Note:
2572: The vectors `x` and `y` cannot be the same. I.e., one cannot
2573: call `MatMult`(A,y,y).
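Example:
A minimal sketch that creates compatible vectors with `MatCreateVecs()` and applies an assembled matrix `A`; the right-hand side value is purely illustrative.
.vb
  Vec x, y;

  PetscCall(MatCreateVecs(A, &x, &y)); /* x conforms to the columns of A, y to the rows */
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y)); /* y = A x */
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
.ve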
2575: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2576: @*/
2577: PetscErrorCode MatMult(Mat mat, Vec x, Vec y)
2578: {
2579: PetscFunctionBegin;
2583: VecCheckAssembled(x);
2585: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2586: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2587: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2588: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
2589: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
2590: PetscCheck(mat->cmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, x->map->n);
2591: PetscCheck(mat->rmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, y->map->n);
2592: PetscCall(VecSetErrorIfLocked(y, 3));
2593: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2594: MatCheckPreallocated(mat, 1);
2596: PetscCall(VecLockReadPush(x));
2597: PetscCall(PetscLogEventBegin(MAT_Mult, mat, x, y, 0));
2598: PetscUseTypeMethod(mat, mult, x, y);
2599: PetscCall(PetscLogEventEnd(MAT_Mult, mat, x, y, 0));
2600: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2601: PetscCall(VecLockReadPop(x));
2602: PetscFunctionReturn(PETSC_SUCCESS);
2603: }
2605: /*@
2606: MatMultTranspose - Computes matrix transpose times a vector $y = A^T * x$.
2608: Neighbor-wise Collective
2610: Input Parameters:
2611: + mat - the matrix
2612: - x - the vector to be multiplied
2614: Output Parameter:
2615: . y - the result
2617: Level: beginner
2619: Notes:
2620: The vectors `x` and `y` cannot be the same. I.e., one cannot
2621: call `MatMultTranspose`(A,y,y).
2623: For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiply;
2624: use `MatMultHermitianTranspose()` for that.
2626: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
2627: @*/
2628: PetscErrorCode MatMultTranspose(Mat mat, Vec x, Vec y)
2629: {
2630: PetscErrorCode (*op)(Mat, Vec, Vec) = NULL;
2632: PetscFunctionBegin;
2636: VecCheckAssembled(x);
2639: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2640: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2641: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2642: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2643: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2644: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2645: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2646: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(x, 2, PETSC_TRUE));
2647: MatCheckPreallocated(mat, 1);
2649: if (!mat->ops->multtranspose) {
2650: if (mat->symmetric == PETSC_BOOL3_TRUE && mat->ops->mult) op = mat->ops->mult;
2651: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined", ((PetscObject)mat)->type_name);
2652: } else op = mat->ops->multtranspose;
2653: PetscCall(PetscLogEventBegin(MAT_MultTranspose, mat, x, y, 0));
2654: PetscCall(VecLockReadPush(x));
2655: PetscCall((*op)(mat, x, y));
2656: PetscCall(VecLockReadPop(x));
2657: PetscCall(PetscLogEventEnd(MAT_MultTranspose, mat, x, y, 0));
2658: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2659: if (mat->erroriffailure) PetscCall(VecValidValues_Internal(y, 3, PETSC_FALSE));
2660: PetscFunctionReturn(PETSC_SUCCESS);
2661: }
2663: /*@
2664: MatMultHermitianTranspose - Computes matrix Hermitian-transpose times a vector $y = A^H * x$.
2666: Neighbor-wise Collective
2668: Input Parameters:
2669: + mat - the matrix
2670: - x - the vector to be multiplied
2672: Output Parameter:
2673: . y - the result
2675: Level: beginner
2677: Notes:
2678: The vectors `x` and `y` cannot be the same. I.e., one cannot
2679: call `MatMultHermitianTranspose`(A,y,y).
2681: Also called the conjugate transpose, complex conjugate transpose, or adjoint.
2683: For real numbers `MatMultTranspose()` and `MatMultHermitianTranspose()` are identical.
2685: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
2686: @*/
2687: PetscErrorCode MatMultHermitianTranspose(Mat mat, Vec x, Vec y)
2688: {
2689: PetscFunctionBegin;
2695: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2696: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2697: PetscCheck(x != y, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "x and y must be different vectors");
2698: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
2699: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
2700: PetscCheck(mat->cmap->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->n, y->map->n);
2701: PetscCheck(mat->rmap->n == x->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, x->map->n);
2702: MatCheckPreallocated(mat, 1);
2704: PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose, mat, x, y, 0));
2705: #if defined(PETSC_USE_COMPLEX)
2706: if (mat->ops->multhermitiantranspose || (mat->hermitian == PETSC_BOOL3_TRUE && mat->ops->mult)) {
2707: PetscCall(VecLockReadPush(x));
2708: if (mat->ops->multhermitiantranspose) PetscUseTypeMethod(mat, multhermitiantranspose, x, y);
2709: else PetscUseTypeMethod(mat, mult, x, y);
2710: PetscCall(VecLockReadPop(x));
2711: } else {
2712: Vec w;
2713: PetscCall(VecDuplicate(x, &w));
2714: PetscCall(VecCopy(x, w));
2715: PetscCall(VecConjugate(w));
2716: PetscCall(MatMultTranspose(mat, w, y));
2717: PetscCall(VecDestroy(&w));
2718: PetscCall(VecConjugate(y));
2719: }
2720: PetscCall(PetscObjectStateIncrease((PetscObject)y));
2721: #else
2722: PetscCall(MatMultTranspose(mat, x, y));
2723: #endif
2724: PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose, mat, x, y, 0));
2725: PetscFunctionReturn(PETSC_SUCCESS);
2726: }
2728: /*@
2729: MatMultAdd - Computes $v3 = v2 + A * v1$.
2731: Neighbor-wise Collective
2733: Input Parameters:
2734: + mat - the matrix
2735: . v1 - the vector to be multiplied by `mat`
2736: - v2 - the vector to be added to the result
2738: Output Parameter:
2739: . v3 - the result
2741: Level: beginner
2743: Note:
2744: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2745: call `MatMultAdd`(A,v1,v2,v1).
2747: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
2748: @*/
2749: PetscErrorCode MatMultAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2750: {
2751: PetscFunctionBegin;
2758: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2759: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2760: PetscCheck(mat->cmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v1->map->N);
2761: /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
2762: PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
2763: PetscCheck(mat->rmap->n == v3->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v3->map->n);
2764: PetscCheck(mat->rmap->n == v2->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, v2->map->n);
2765: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2766: MatCheckPreallocated(mat, 1);
2768: PetscCall(PetscLogEventBegin(MAT_MultAdd, mat, v1, v2, v3));
2769: PetscCall(VecLockReadPush(v1));
2770: PetscUseTypeMethod(mat, multadd, v1, v2, v3);
2771: PetscCall(VecLockReadPop(v1));
2772: PetscCall(PetscLogEventEnd(MAT_MultAdd, mat, v1, v2, v3));
2773: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2774: PetscFunctionReturn(PETSC_SUCCESS);
2775: }
2777: /*@
2778: MatMultTransposeAdd - Computes $v3 = v2 + A^T * v1$.
2780: Neighbor-wise Collective
2782: Input Parameters:
2783: + mat - the matrix
2784: . v1 - the vector to be multiplied by the transpose of the matrix
2785: - v2 - the vector to be added to the result
2787: Output Parameter:
2788: . v3 - the result
2790: Level: beginner
2792: Note:
2793: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2794: call `MatMultTransposeAdd`(A,v1,v2,v1).
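  Example Usage:
  A minimal sketch, assuming `A` is an assembled `Mat`, `v1` matches the row layout of `A`, and `v2`, `v3` match its column layout, all created elsewhere
.vb
  // v3 = v2 + A^T * v1 (v3 must be a different vector than v1)
  PetscCall(MatMultTransposeAdd(A, v1, v2, v3));
.ve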
2796: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2797: @*/
2798: PetscErrorCode MatMultTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2799: {
2800: PetscErrorCode (*op)(Mat, Vec, Vec, Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2802: PetscFunctionBegin;
2809: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2810: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2811: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2812: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2813: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2814: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2815: PetscCheck(op, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)mat)->type_name);
2816: MatCheckPreallocated(mat, 1);
2818: PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd, mat, v1, v2, v3));
2819: PetscCall(VecLockReadPush(v1));
2820: PetscCall((*op)(mat, v1, v2, v3));
2821: PetscCall(VecLockReadPop(v1));
2822: PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd, mat, v1, v2, v3));
2823: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2824: PetscFunctionReturn(PETSC_SUCCESS);
2825: }
2827: /*@
2828: MatMultHermitianTransposeAdd - Computes $v3 = v2 + A^H * v1$.
2830: Neighbor-wise Collective
2832: Input Parameters:
2833: + mat - the matrix
2834: . v1 - the vector to be multiplied by the Hermitian transpose
2835: - v2 - the vector to be added to the result
2837: Output Parameter:
2838: . v3 - the result
2840: Level: beginner
2842: Note:
2843: The vectors `v1` and `v3` cannot be the same. I.e., one cannot
2844: call `MatMultHermitianTransposeAdd`(A,v1,v2,v1).
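  Example Usage:
  A minimal sketch, assuming a complex-valued, assembled `Mat` `A` and compatible `Vec`s `v1`, `v2`, `v3` created elsewhere
.vb
  // v3 = v2 + A^H * v1 (v3 must be a different vector than v1)
  PetscCall(MatMultHermitianTransposeAdd(A, v1, v2, v3));
.ve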
2846: .seealso: [](ch_matrices), `Mat`, `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2847: @*/
2848: PetscErrorCode MatMultHermitianTransposeAdd(Mat mat, Vec v1, Vec v2, Vec v3)
2849: {
2850: PetscFunctionBegin;
2857: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
2858: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
2859: PetscCheck(v1 != v3, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "v1 and v3 must be different vectors");
2860: PetscCheck(mat->rmap->N == v1->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, v1->map->N);
2861: PetscCheck(mat->cmap->N == v2->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v2->map->N);
2862: PetscCheck(mat->cmap->N == v3->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, v3->map->N);
2863: MatCheckPreallocated(mat, 1);
2865: PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2866: PetscCall(VecLockReadPush(v1));
2867: if (mat->ops->multhermitiantransposeadd) PetscUseTypeMethod(mat, multhermitiantransposeadd, v1, v2, v3);
2868: else {
2869: Vec w, z;
2870: PetscCall(VecDuplicate(v1, &w));
2871: PetscCall(VecCopy(v1, w));
2872: PetscCall(VecConjugate(w));
2873: PetscCall(VecDuplicate(v3, &z));
2874: PetscCall(MatMultTranspose(mat, w, z));
2875: PetscCall(VecDestroy(&w));
2876: PetscCall(VecConjugate(z));
2877: if (v2 != v3) {
2878: PetscCall(VecWAXPY(v3, 1.0, v2, z));
2879: } else {
2880: PetscCall(VecAXPY(v3, 1.0, z));
2881: }
2882: PetscCall(VecDestroy(&z));
2883: }
2884: PetscCall(VecLockReadPop(v1));
2885: PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd, mat, v1, v2, v3));
2886: PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2887: PetscFunctionReturn(PETSC_SUCCESS);
2888: }
2890: /*@C
2891: MatGetFactorType - gets the type of factorization a matrix represents
2893: Not Collective
2895: Input Parameter:
2896: . mat - the matrix
2898: Output Parameter:
2899: . t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2901: Level: intermediate
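  Example Usage:
  A minimal sketch, assuming `mat` is an existing `Mat`
.vb
  MatFactorType ftype;

  PetscCall(MatGetFactorType(mat, &ftype));
  if (ftype == MAT_FACTOR_NONE) {
    // mat is not a factored matrix
  }
.ve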
2903: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2904: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2905: @*/
2906: PetscErrorCode MatGetFactorType(Mat mat, MatFactorType *t)
2907: {
2908: PetscFunctionBegin;
2911: PetscAssertPointer(t, 2);
2912: *t = mat->factortype;
2913: PetscFunctionReturn(PETSC_SUCCESS);
2914: }
2916: /*@C
2917: MatSetFactorType - sets the type of factorization a matrix represents
2919: Logically Collective
2921: Input Parameters:
2922: + mat - the matrix
2923: - t - the type, one of `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2925: Level: intermediate
2927: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`, `MAT_FACTOR_NONE`, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ILU`,
2928: `MAT_FACTOR_ICC`,`MAT_FACTOR_ILUDT`, `MAT_FACTOR_QR`
2929: @*/
2930: PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
2931: {
2932: PetscFunctionBegin;
2935: mat->factortype = t;
2936: PetscFunctionReturn(PETSC_SUCCESS);
2937: }
2939: /*@C
2940: MatGetInfo - Returns information about matrix storage (number of
2941: nonzeros, memory, etc.).
2943: Collective if `MAT_GLOBAL_MAX` or `MAT_GLOBAL_SUM` is used as the flag
2945: Input Parameters:
2946: + mat - the matrix
2947: - flag - flag indicating the type of parameters to be returned (`MAT_LOCAL` - local matrix, `MAT_GLOBAL_MAX` - maximum over all processors, `MAT_GLOBAL_SUM` - sum over all processors)
2949: Output Parameter:
2950: . info - matrix information context
2952: Options Database Key:
2953: . -mat_view ::ascii_info - print matrix info to `PETSC_STDOUT`
2955: Notes:
2956: The `MatInfo` context contains a variety of matrix data, including
2957: number of nonzeros allocated and used, number of mallocs during
2958: matrix assembly, etc. Additional information for factored matrices
2959: is provided (such as the fill ratio, number of mallocs during
2960: factorization, etc.).
2962: Example:
2963: See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
2964: data within the MatInfo context. For example,
2965: .vb
2966: MatInfo info;
2967: Mat A;
2968: double mal, nz_a;
2970: MatGetInfo(A, MAT_LOCAL, &info);
2971: mal = info.mallocs;
2972: nz_a = info.nz_allocated;
2973: .ve
2975: Fortran users should declare info as a double precision
2976: array of dimension `MAT_INFO_SIZE`, and then extract the parameters
2977: of interest. See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h for
2978: a complete list of parameter names.
2979: .vb
2980: double precision info(MAT_INFO_SIZE)
2981: double precision mal, nz_a
2982: Mat A
2983: integer ierr
2985: call MatGetInfo(A, MAT_LOCAL, info, ierr)
2986: mal = info(MAT_INFO_MALLOCS)
2987: nz_a = info(MAT_INFO_NZ_ALLOCATED)
2988: .ve
2990: Level: intermediate
2992: Developer Note:
2993: The Fortran interface is not autogenerated as the
2994: interface definition cannot be generated correctly [due to `MatInfo` argument]
2996: .seealso: [](ch_matrices), `Mat`, `MatInfo`, `MatStashGetInfo()`
2997: @*/
2998: PetscErrorCode MatGetInfo(Mat mat, MatInfoType flag, MatInfo *info)
2999: {
3000: PetscFunctionBegin;
3003: PetscAssertPointer(info, 3);
3004: MatCheckPreallocated(mat, 1);
3005: PetscUseTypeMethod(mat, getinfo, flag, info);
3006: PetscFunctionReturn(PETSC_SUCCESS);
3007: }
3009: /*
3010: This is used by external packages where it is not easy to get the info from the actual
3011: matrix factorization.
3012: */
3013: PetscErrorCode MatGetInfo_External(Mat A, MatInfoType flag, MatInfo *info)
3014: {
3015: PetscFunctionBegin;
3016: PetscCall(PetscMemzero(info, sizeof(MatInfo)));
3017: PetscFunctionReturn(PETSC_SUCCESS);
3018: }
3020: /*@C
3021: MatLUFactor - Performs in-place LU factorization of matrix.
3023: Collective
3025: Input Parameters:
3026: + mat - the matrix
3027: . row - row permutation
3028: . col - column permutation
3029: - info - options for factorization, includes
3030: .vb
3031: fill - expected fill as ratio of original fill.
3032: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3033: Run with the option -info to determine an optimal value to use
3034: .ve
3036: Level: developer
3038: Notes:
3039: Most users should employ the `KSP` interface for linear solvers
3040: instead of working directly with matrix algebra routines such as this.
3041: See, e.g., `KSPCreate()`.
3043: This changes the state of the matrix to a factored matrix; it cannot be used
3044: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3046: This is really in-place only for dense matrices; the preferred approach is to use `MatGetFactor()`, `MatLUFactorSymbolic()`, and `MatLUFactorNumeric()`
3047: when not using `KSP`.
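  Example Usage:
  A minimal sketch of an in-place factorization followed by a solve, assuming `A` is a square, assembled sequential matrix (e.g. `MATSEQDENSE`) and `b`, `x` are compatible `Vec`s created elsewhere
.vb
  IS row, col;

  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &row, &col));
  PetscCall(MatLUFactor(A, row, col, NULL)); // A now holds its own LU factors
  PetscCall(MatSolve(A, b, x));
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
.ve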
3049: Developer Note:
3050: The Fortran interface is not autogenerated as the
3051: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3053: .seealso: [](ch_matrices), [Matrix Factorization](sec_matfactor), `Mat`, `MatFactorType`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
3054: `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3055: @*/
3056: PetscErrorCode MatLUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3057: {
3058: MatFactorInfo tinfo;
3060: PetscFunctionBegin;
3064: if (info) PetscAssertPointer(info, 4);
3066: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3067: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3068: MatCheckPreallocated(mat, 1);
3069: if (!info) {
3070: PetscCall(MatFactorInfoInitialize(&tinfo));
3071: info = &tinfo;
3072: }
3074: PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, row, col, 0));
3075: PetscUseTypeMethod(mat, lufactor, row, col, info);
3076: PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, row, col, 0));
3077: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3078: PetscFunctionReturn(PETSC_SUCCESS);
3079: }
3081: /*@C
3082: MatILUFactor - Performs in-place ILU factorization of matrix.
3084: Collective
3086: Input Parameters:
3087: + mat - the matrix
3088: . row - row permutation
3089: . col - column permutation
3090: - info - structure containing
3091: .vb
3092: levels - number of levels of fill.
3093: expected fill - as ratio of original fill.
3094: 1 or 0 - indicating whether to force fill on the diagonal (improves robustness for matrices
3095: missing diagonal entries)
3096: .ve
3098: Level: developer
3100: Notes:
3101: Most users should employ the `KSP` interface for linear solvers
3102: instead of working directly with matrix algebra routines such as this.
3103: See, e.g., `KSPCreate()`.
3105: This is probably really in-place only when the level of fill is zero; otherwise it allocates
3106: new space to store the factored matrix and frees the previous memory. The preferred approach is to use `MatGetFactor()`, `MatILUFactorSymbolic()`, and `MatILUFactorNumeric()`
3107: when not using `KSP`.
3109: Developer Note:
3110: The Fortran interface is not autogenerated as the
3111: interface definition cannot be generated correctly [due to MatFactorInfo]
3113: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
3114: @*/
3115: PetscErrorCode MatILUFactor(Mat mat, IS row, IS col, const MatFactorInfo *info)
3116: {
3117: PetscFunctionBegin;
3121: PetscAssertPointer(info, 4);
3123: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
3124: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3125: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3126: MatCheckPreallocated(mat, 1);
3128: PetscCall(PetscLogEventBegin(MAT_ILUFactor, mat, row, col, 0));
3129: PetscUseTypeMethod(mat, ilufactor, row, col, info);
3130: PetscCall(PetscLogEventEnd(MAT_ILUFactor, mat, row, col, 0));
3131: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3132: PetscFunctionReturn(PETSC_SUCCESS);
3133: }
3135: /*@C
3136: MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
3137: Call this routine before calling `MatLUFactorNumeric()` and after `MatGetFactor()`.
3139: Collective
3141: Input Parameters:
3142: + fact - the factor matrix obtained with `MatGetFactor()`
3143: . mat - the matrix
3144: . row - the row permutation
3145: . col - the column permutation
3146: - info - options for factorization, includes
3147: .vb
3148: fill - expected fill as ratio of original fill. Run with the option -info to determine an optimal value to use
3149: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3150: .ve
3152: Level: developer
3154: Notes:
3155: See [Matrix Factorization](sec_matfactor) for additional information about factorizations
3157: Most users should employ the simplified `KSP` interface for linear solvers
3158: instead of working directly with matrix algebra routines such as this.
3159: See, e.g., `KSPCreate()`.
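  Example Usage:
  A minimal sketch of the usual factor-then-solve workflow, assuming `A` is a square, assembled `MATAIJ` matrix and `b`, `x` are compatible `Vec`s created elsewhere
.vb
  Mat F;
  IS  row, col;

  PetscCall(MatGetOrdering(A, MATORDERINGND, &row, &col));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
  PetscCall(MatLUFactorSymbolic(F, A, row, col, NULL));
  PetscCall(MatLUFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&row));
  PetscCall(ISDestroy(&col));
  PetscCall(MatDestroy(&F));
.ve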
3161: Developer Note:
3162: The Fortran interface is not autogenerated as the
3163: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3165: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`
3166: @*/
3167: PetscErrorCode MatLUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
3168: {
3169: MatFactorInfo tinfo;
3171: PetscFunctionBegin;
3176: if (info) PetscAssertPointer(info, 5);
3179: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3180: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3181: MatCheckPreallocated(mat, 2);
3182: if (!info) {
3183: PetscCall(MatFactorInfoInitialize(&tinfo));
3184: info = &tinfo;
3185: }
3187: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic, mat, row, col, 0));
3188: PetscUseTypeMethod(fact, lufactorsymbolic, mat, row, col, info);
3189: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic, mat, row, col, 0));
3190: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3191: PetscFunctionReturn(PETSC_SUCCESS);
3192: }
3194: /*@C
3195: MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
3196: Call this routine after first calling `MatLUFactorSymbolic()` and `MatGetFactor()`.
3198: Collective
3200: Input Parameters:
3201: + fact - the factor matrix obtained with `MatGetFactor()`
3202: . mat - the matrix
3203: - info - options for factorization
3205: Level: developer
3207: Notes:
3208: See `MatLUFactor()` for in-place factorization. See
3209: `MatCholeskyFactorNumeric()` for the symmetric, positive definite case.
3211: Most users should employ the `KSP` interface for linear solvers
3212: instead of working directly with matrix algebra routines such as this.
3213: See, e.g., `KSPCreate()`.
3215: Developer Note:
3216: The Fortran interface is not autogenerated as the
3217: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3219: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`
3220: @*/
3221: PetscErrorCode MatLUFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3222: {
3223: MatFactorInfo tinfo;
3225: PetscFunctionBegin;
3230: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3231: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3232: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3234: MatCheckPreallocated(mat, 2);
3235: if (!info) {
3236: PetscCall(MatFactorInfoInitialize(&tinfo));
3237: info = &tinfo;
3238: }
3240: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric, mat, fact, 0, 0));
3241: else PetscCall(PetscLogEventBegin(MAT_LUFactor, mat, fact, 0, 0));
3242: PetscUseTypeMethod(fact, lufactornumeric, mat, info);
3243: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric, mat, fact, 0, 0));
3244: else PetscCall(PetscLogEventEnd(MAT_LUFactor, mat, fact, 0, 0));
3245: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3246: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3247: PetscFunctionReturn(PETSC_SUCCESS);
3248: }
3250: /*@C
3251: MatCholeskyFactor - Performs in-place Cholesky factorization of a
3252: symmetric matrix.
3254: Collective
3256: Input Parameters:
3257: + mat - the matrix
3258: . perm - row and column permutations
3259: - info - expected fill as ratio of original fill
3261: Level: developer
3263: Notes:
3264: See `MatLUFactor()` for the nonsymmetric case. See also `MatGetFactor()`,
3265: `MatCholeskyFactorSymbolic()`, and `MatCholeskyFactorNumeric()`.
3267: Most users should employ the `KSP` interface for linear solvers
3268: instead of working directly with matrix algebra routines such as this.
3269: See, e.g., `KSPCreate()`.
3271: Developer Note:
3272: The Fortran interface is not autogenerated as the
3273: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3275: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
3276: `MatGetOrdering()`
3277: @*/
3278: PetscErrorCode MatCholeskyFactor(Mat mat, IS perm, const MatFactorInfo *info)
3279: {
3280: MatFactorInfo tinfo;
3282: PetscFunctionBegin;
3285: if (info) PetscAssertPointer(info, 3);
3287: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3288: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3289: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3290: MatCheckPreallocated(mat, 1);
3291: if (!info) {
3292: PetscCall(MatFactorInfoInitialize(&tinfo));
3293: info = &tinfo;
3294: }
3296: PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, perm, 0, 0));
3297: PetscUseTypeMethod(mat, choleskyfactor, perm, info);
3298: PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, perm, 0, 0));
3299: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3300: PetscFunctionReturn(PETSC_SUCCESS);
3301: }
3303: /*@C
3304: MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
3305: of a symmetric matrix.
3307: Collective
3309: Input Parameters:
3310: + fact - the factor matrix obtained with `MatGetFactor()`
3311: . mat - the matrix
3312: . perm - row and column permutations
3313: - info - options for factorization, includes
3314: .vb
3315: fill - expected fill as ratio of original fill.
3316: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3317: Run with the option -info to determine an optimal value to use
3318: .ve
3320: Level: developer
3322: Notes:
3323: See `MatLUFactorSymbolic()` for the nonsymmetric case. See also
3324: `MatCholeskyFactor()` and `MatCholeskyFactorNumeric()`.
3326: Most users should employ the `KSP` interface for linear solvers
3327: instead of working directly with matrix algebra routines such as this.
3328: See, e.g., `KSPCreate()`.
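  Example Usage:
  A minimal sketch for a symmetric matrix, assuming `A` is an assembled `MATAIJ` or `MATSBAIJ` matrix whose type supports a PETSc Cholesky factorization, and `b`, `x` are compatible `Vec`s created elsewhere
.vb
  Mat F;
  IS  rperm, cperm;

  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
  PetscCall(MatCholeskyFactorSymbolic(F, A, rperm, NULL));
  PetscCall(MatCholeskyFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x));
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
  PetscCall(MatDestroy(&F));
.ve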
3330: Developer Note:
3331: The Fortran interface is not autogenerated as the
3332: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3334: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
3335: `MatGetOrdering()`
3336: @*/
3337: PetscErrorCode MatCholeskyFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
3338: {
3339: MatFactorInfo tinfo;
3341: PetscFunctionBegin;
3345: if (info) PetscAssertPointer(info, 4);
3348: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "Matrix must be square");
3349: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3350: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3351: MatCheckPreallocated(mat, 2);
3352: if (!info) {
3353: PetscCall(MatFactorInfoInitialize(&tinfo));
3354: info = &tinfo;
3355: }
3357: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3358: PetscUseTypeMethod(fact, choleskyfactorsymbolic, mat, perm, info);
3359: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic, mat, perm, 0, 0));
3360: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3361: PetscFunctionReturn(PETSC_SUCCESS);
3362: }
3364: /*@C
3365: MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
3366: of a symmetric matrix. Call this routine after first calling `MatGetFactor()` and
3367: `MatCholeskyFactorSymbolic()`.
3369: Collective
3371: Input Parameters:
3372: + fact - the factor matrix obtained with `MatGetFactor()`, where the factored values are stored
3373: . mat - the initial matrix that is to be factored
3374: - info - options for factorization
3376: Level: developer
3378: Note:
3379: Most users should employ the `KSP` interface for linear solvers
3380: instead of working directly with matrix algebra routines such as this.
3381: See, e.g., `KSPCreate()`.
3383: Developer Note:
3384: The Fortran interface is not autogenerated as the
3385: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3387: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`
3388: @*/
3389: PetscErrorCode MatCholeskyFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3390: {
3391: MatFactorInfo tinfo;
3393: PetscFunctionBegin;
3398: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3399: PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3400: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3401: MatCheckPreallocated(mat, 2);
3402: if (!info) {
3403: PetscCall(MatFactorInfoInitialize(&tinfo));
3404: info = &tinfo;
3405: }
3407: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3408: else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor, mat, fact, 0, 0));
3409: PetscUseTypeMethod(fact, choleskyfactornumeric, mat, info);
3410: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric, mat, fact, 0, 0));
3411: else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor, mat, fact, 0, 0));
3412: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3413: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3414: PetscFunctionReturn(PETSC_SUCCESS);
3415: }
3417: /*@
3418: MatQRFactor - Performs in-place QR factorization of matrix.
3420: Collective
3422: Input Parameters:
3423: + mat - the matrix
3424: . col - column permutation
3425: - info - options for factorization, includes
3426: .vb
3427: fill - expected fill as ratio of original fill.
3428: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3429: Run with the option -info to determine an optimal value to use
3430: .ve
3432: Level: developer
3434: Notes:
3435: Most users should employ the `KSP` interface for linear solvers
3436: instead of working directly with matrix algebra routines such as this.
3437: See, e.g., `KSPCreate()`.
3439: This changes the state of the matrix to a factored matrix; it cannot be used
3440: for example with `MatSetValues()` unless one first calls `MatSetUnfactored()`.
3442: Developer Note:
3443: The Fortran interface is not autogenerated as the
3444: interface definition cannot be generated correctly [due to MatFactorInfo]
3446: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3447: `MatSetUnfactored()`
3448: @*/
3449: PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3450: {
3451: PetscFunctionBegin;
3454: if (info) PetscAssertPointer(info, 3);
3456: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3457: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3458: MatCheckPreallocated(mat, 1);
3459: PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, col, 0, 0));
3460: PetscUseMethod(mat, "MatQRFactor_C", (Mat, IS, const MatFactorInfo *), (mat, col, info));
3461: PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, col, 0, 0));
3462: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3463: PetscFunctionReturn(PETSC_SUCCESS);
3464: }
3466: /*@
3467: MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
3468: Call this routine after `MatGetFactor()` but before calling `MatQRFactorNumeric()`.
3470: Collective
3472: Input Parameters:
3473: + fact - the factor matrix obtained with `MatGetFactor()`
3474: . mat - the matrix
3475: . col - column permutation
3476: - info - options for factorization, includes
3477: .vb
3478: fill - expected fill as ratio of original fill.
3479: dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3480: Run with the option -info to determine an optimal value to use
3481: .ve
3483: Level: developer
3485: Note:
3486: Most users should employ the `KSP` interface for linear solvers
3487: instead of working directly with matrix algebra routines such as this.
3488: See, e.g., `KSPCreate()`.
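  Example Usage:
  A minimal sketch of a least-squares style QR solve, assuming `A` is an assembled `MATSEQDENSE` matrix, `b`, `x` are `Vec`s compatible with the rows and columns of `A`, and the dense QR implementation accepts `NULL` for the column permutation and options
.vb
  Mat F;

  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_QR, &F));
  PetscCall(MatQRFactorSymbolic(F, A, NULL, NULL));
  PetscCall(MatQRFactorNumeric(F, A, NULL));
  PetscCall(MatSolve(F, b, x));
  PetscCall(MatDestroy(&F));
.ve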
3490: Developer Note:
3491: The Fortran interface is not autogenerated as the
3492: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3494: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatFactorInfo`, `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfoInitialize()`
3495: @*/
3496: PetscErrorCode MatQRFactorSymbolic(Mat fact, Mat mat, IS col, const MatFactorInfo *info)
3497: {
3498: MatFactorInfo tinfo;
3500: PetscFunctionBegin;
3504: if (info) PetscAssertPointer(info, 4);
3507: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3508: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
3509: MatCheckPreallocated(mat, 2);
3510: if (!info) {
3511: PetscCall(MatFactorInfoInitialize(&tinfo));
3512: info = &tinfo;
3513: }
3515: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic, fact, mat, col, 0));
3516: PetscUseMethod(fact, "MatQRFactorSymbolic_C", (Mat, Mat, IS, const MatFactorInfo *), (fact, mat, col, info));
3517: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic, fact, mat, col, 0));
3518: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3519: PetscFunctionReturn(PETSC_SUCCESS);
3520: }
3522: /*@
3523: MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
3524: Call this routine after first calling `MatGetFactor()`, and `MatQRFactorSymbolic()`.
3526: Collective
3528: Input Parameters:
3529: + fact - the factor matrix obtained with `MatGetFactor()`
3530: . mat - the matrix
3531: - info - options for factorization
3533: Level: developer
3535: Notes:
3536: See `MatQRFactor()` for in-place factorization.
3538: Most users should employ the `KSP` interface for linear solvers
3539: instead of working directly with matrix algebra routines such as this.
3540: See, e.g., `KSPCreate()`.
3542: Developer Note:
3543: The Fortran interface is not autogenerated as the
3544: interface definition cannot be generated correctly [due to `MatFactorInfo`]
3546: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorInfo`, `MatGetFactor()`, `MatQRFactor()`, `MatQRFactorSymbolic()`, `MatLUFactor()`
3547: @*/
3548: PetscErrorCode MatQRFactorNumeric(Mat fact, Mat mat, const MatFactorInfo *info)
3549: {
3550: MatFactorInfo tinfo;
3552: PetscFunctionBegin;
3557: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
3558: PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,
3559: mat->rmap->N, (fact)->rmap->N, mat->cmap->N, (fact)->cmap->N);
3561: MatCheckPreallocated(mat, 2);
3562: if (!info) {
3563: PetscCall(MatFactorInfoInitialize(&tinfo));
3564: info = &tinfo;
3565: }
3567: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric, mat, fact, 0, 0));
3568: else PetscCall(PetscLogEventBegin(MAT_QRFactor, mat, fact, 0, 0));
3569: PetscUseMethod(fact, "MatQRFactorNumeric_C", (Mat, Mat, const MatFactorInfo *), (fact, mat, info));
3570: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric, mat, fact, 0, 0));
3571: else PetscCall(PetscLogEventEnd(MAT_QRFactor, mat, fact, 0, 0));
3572: PetscCall(MatViewFromOptions(fact, NULL, "-mat_factor_view"));
3573: PetscCall(PetscObjectStateIncrease((PetscObject)fact));
3574: PetscFunctionReturn(PETSC_SUCCESS);
3575: }
3577: /*@
3578: MatSolve - Solves $A x = b$, given a factored matrix.
3580: Neighbor-wise Collective
3582: Input Parameters:
3583: + mat - the factored matrix
3584: - b - the right-hand-side vector
3586: Output Parameter:
3587: . x - the result vector
3589: Level: developer
3591: Notes:
3592: The vectors `b` and `x` cannot be the same. I.e., one cannot
3593: call `MatSolve`(A,x,x).
3595: Most users should employ the `KSP` interface for linear solvers
3596: instead of working directly with matrix algebra routines such as this.
3597: See, e.g., `KSPCreate()`.
3599: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactor()`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3600: @*/
3601: PetscErrorCode MatSolve(Mat mat, Vec b, Vec x)
3602: {
3603: PetscFunctionBegin;
3608: PetscCheckSameComm(mat, 1, b, 2);
3609: PetscCheckSameComm(mat, 1, x, 3);
3610: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3611: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3612: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3613: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3614: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3615: MatCheckPreallocated(mat, 1);
3617: PetscCall(PetscLogEventBegin(MAT_Solve, mat, b, x, 0));
3618: if (mat->factorerrortype) {
3619: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3620: PetscCall(VecSetInf(x));
3621: } else PetscUseTypeMethod(mat, solve, b, x);
3622: PetscCall(PetscLogEventEnd(MAT_Solve, mat, b, x, 0));
3623: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3624: PetscFunctionReturn(PETSC_SUCCESS);
3625: }
3627: static PetscErrorCode MatMatSolve_Basic(Mat A, Mat B, Mat X, PetscBool trans)
3628: {
3629: Vec b, x;
3630: PetscInt N, i;
3631: PetscErrorCode (*f)(Mat, Vec, Vec);
3632: PetscBool Abound, Bneedconv = PETSC_FALSE, Xneedconv = PETSC_FALSE;
3634: PetscFunctionBegin;
3635: if (A->factorerrortype) {
3636: PetscCall(PetscInfo(A, "MatFactorError %d\n", A->factorerrortype));
3637: PetscCall(MatSetInf(X));
3638: PetscFunctionReturn(PETSC_SUCCESS);
3639: }
3640: f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
3641: PetscCheck(f, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Mat type %s", ((PetscObject)A)->type_name);
3642: PetscCall(MatBoundToCPU(A, &Abound));
3643: if (!Abound) {
3644: PetscCall(PetscObjectTypeCompareAny((PetscObject)B, &Bneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3645: PetscCall(PetscObjectTypeCompareAny((PetscObject)X, &Xneedconv, MATSEQDENSE, MATMPIDENSE, ""));
3646: }
3647: #if PetscDefined(HAVE_CUDA)
3648: if (Bneedconv) PetscCall(MatConvert(B, MATDENSECUDA, MAT_INPLACE_MATRIX, &B));
3649: if (Xneedconv) PetscCall(MatConvert(X, MATDENSECUDA, MAT_INPLACE_MATRIX, &X));
3650: #elif PetscDefined(HAVE_HIP)
3651: if (Bneedconv) PetscCall(MatConvert(B, MATDENSEHIP, MAT_INPLACE_MATRIX, &B));
3652: if (Xneedconv) PetscCall(MatConvert(X, MATDENSEHIP, MAT_INPLACE_MATRIX, &X));
3653: #endif
3654: PetscCall(MatGetSize(B, NULL, &N));
3655: for (i = 0; i < N; i++) {
3656: PetscCall(MatDenseGetColumnVecRead(B, i, &b));
3657: PetscCall(MatDenseGetColumnVecWrite(X, i, &x));
3658: PetscCall((*f)(A, b, x));
3659: PetscCall(MatDenseRestoreColumnVecWrite(X, i, &x));
3660: PetscCall(MatDenseRestoreColumnVecRead(B, i, &b));
3661: }
3662: if (Bneedconv) PetscCall(MatConvert(B, MATDENSE, MAT_INPLACE_MATRIX, &B));
3663: if (Xneedconv) PetscCall(MatConvert(X, MATDENSE, MAT_INPLACE_MATRIX, &X));
3664: PetscFunctionReturn(PETSC_SUCCESS);
3665: }
3667: /*@
3668: MatMatSolve - Solves $A X = B$, given a factored matrix.
3670: Neighbor-wise Collective
3672: Input Parameters:
3673: + A - the factored matrix
3674: - B - the right-hand-side matrix, of type `MATDENSE` (or sparse `MATAIJ` when using MUMPS)
3676: Output Parameter:
3677: . X - the result matrix (dense matrix)
3679: Level: developer
3681: Note:
3682: If `B` is a `MATDENSE` matrix then one can call `MatMatSolve`(A,B,B) except with `MATSOLVERMKL_CPARDISO`;
3683: otherwise, `B` and `X` cannot be the same.
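  Example Usage:
  A minimal sketch solving for several right-hand sides at once, assuming `F` is the factored matrix of a square matrix (obtained with `MatGetFactor()` and `MatLUFactorNumeric()`) and `B` is a `MATDENSE` right-hand-side matrix with a compatible row layout
.vb
  Mat X;

  PetscCall(MatDuplicate(B, MAT_DO_NOT_COPY_VALUES, &X));
  PetscCall(MatMatSolve(F, B, X)); // each column of X solves A x = b for the corresponding column of B
  PetscCall(MatDestroy(&X));
.ve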
3685: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3686: @*/
3687: PetscErrorCode MatMatSolve(Mat A, Mat B, Mat X)
3688: {
3689: PetscFunctionBegin;
3694: PetscCheckSameComm(A, 1, B, 2);
3695: PetscCheckSameComm(A, 1, X, 3);
3696: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3697: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3698: PetscCheck(X->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3699: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3700: MatCheckPreallocated(A, 1);
3702: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3703: if (!A->ops->matsolve) {
3704: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolve\n", ((PetscObject)A)->type_name));
3705: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_FALSE));
3706: } else PetscUseTypeMethod(A, matsolve, B, X);
3707: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3708: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3709: PetscFunctionReturn(PETSC_SUCCESS);
3710: }
3712: /*@
3713: MatMatSolveTranspose - Solves $A^T X = B $, given a factored matrix.
3715: Neighbor-wise Collective
3717: Input Parameters:
3718: + A - the factored matrix
3719: - B - the right-hand-side matrix (`MATDENSE` matrix)
3721: Output Parameter:
3722: . X - the result matrix (dense matrix)
3724: Level: developer
3726: Note:
3727: The matrices `B` and `X` cannot be the same. I.e., one cannot
3728: call `MatMatSolveTranspose`(A,X,X).
3730: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolveTranspose()`, `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3731: @*/
3732: PetscErrorCode MatMatSolveTranspose(Mat A, Mat B, Mat X)
3733: {
3734: PetscFunctionBegin;
3739: PetscCheckSameComm(A, 1, B, 2);
3740: PetscCheckSameComm(A, 1, X, 3);
3741: PetscCheck(X != B, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3742: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3743: PetscCheck(A->rmap->N == B->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N);
3744: PetscCheck(A->rmap->n == B->rmap->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->n, B->rmap->n);
3745: PetscCheck(X->cmap->N >= B->cmap->N, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as rhs matrix");
3746: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3747: MatCheckPreallocated(A, 1);
3749: PetscCall(PetscLogEventBegin(MAT_MatSolve, A, B, X, 0));
3750: if (!A->ops->matsolvetranspose) {
3751: PetscCall(PetscInfo(A, "Mat type %s using basic MatMatSolveTranspose\n", ((PetscObject)A)->type_name));
3752: PetscCall(MatMatSolve_Basic(A, B, X, PETSC_TRUE));
3753: } else PetscUseTypeMethod(A, matsolvetranspose, B, X);
3754: PetscCall(PetscLogEventEnd(MAT_MatSolve, A, B, X, 0));
3755: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3756: PetscFunctionReturn(PETSC_SUCCESS);
3757: }
3759: /*@
3760: MatMatTransposeSolve - Solves $A X = B^T$, given a factored matrix.
3762: Neighbor-wise Collective
3764: Input Parameters:
3765: + A - the factored matrix
3766: - Bt - the transpose of right-hand-side matrix as a `MATDENSE`
3768: Output Parameter:
3769: . X - the result matrix (dense matrix)
3771: Level: developer
3773: Note:
3774: For MUMPS, only a centralized sparse compressed-column format on the host processor is supported for the right-hand-side matrix. The user must create `Bt` in sparse compressed-row
3775: format on the host processor and call `MatMatTransposeSolve()` to implement MUMPS' `MatMatSolve()`.
3777: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
3778: @*/
3779: PetscErrorCode MatMatTransposeSolve(Mat A, Mat Bt, Mat X)
3780: {
3781: PetscFunctionBegin;
3786: PetscCheckSameComm(A, 1, Bt, 2);
3787: PetscCheckSameComm(A, 1, X, 3);
3789: PetscCheck(X != Bt, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_IDN, "X and B must be different matrices");
3790: PetscCheck(A->cmap->N == X->rmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->cmap->N, X->rmap->N);
3791: PetscCheck(A->rmap->N == Bt->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, Bt->cmap->N);
3792: PetscCheck(X->cmap->N >= Bt->rmap->N, PetscObjectComm((PetscObject)X), PETSC_ERR_ARG_SIZ, "Solution matrix must have same number of columns as row number of the rhs matrix");
3793: if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3794: PetscCheck(A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
3795: MatCheckPreallocated(A, 1);
3797: PetscCall(PetscLogEventBegin(MAT_MatTrSolve, A, Bt, X, 0));
3798: PetscUseTypeMethod(A, mattransposesolve, Bt, X);
3799: PetscCall(PetscLogEventEnd(MAT_MatTrSolve, A, Bt, X, 0));
3800: PetscCall(PetscObjectStateIncrease((PetscObject)X));
3801: PetscFunctionReturn(PETSC_SUCCESS);
3802: }
3804: /*@
3805: MatForwardSolve - Solves $L x = b$, given a factored matrix, $A = LU$, or
3806: $U^T D^{1/2} x = b$, given a factored symmetric matrix, $A = U^T D U$.
3808: Neighbor-wise Collective
3810: Input Parameters:
3811: + mat - the factored matrix
3812: - b - the right-hand-side vector
3814: Output Parameter:
3815: . x - the result vector
3817: Level: developer
3819: Notes:
3820: `MatSolve()` should be used for most applications, as it performs
3821: a forward solve followed by a backward solve.
3823: The vectors `b` and `x` cannot be the same, i.e., one cannot
3824: call `MatForwardSolve`(A,x,x).
3826: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3827: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3828: `MatForwardSolve()` solves $U^T D y = b$, and
3829: `MatBackwardSolve()` solves $U x = y$.
3830: Thus they do not provide a symmetric preconditioner.
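  Example Usage:
  A minimal sketch of a split triangular solve, assuming `F` holds an LU factorization obtained with `MatLUFactorNumeric()` for a factor type that supports split solves, and `b`, `y`, `x` are compatible `Vec`s created elsewhere
.vb
  PetscCall(MatForwardSolve(F, b, y));  // solve L y = b
  PetscCall(MatBackwardSolve(F, y, x)); // solve U x = y
.ve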
3832: .seealso: [](ch_matrices), `Mat`, `MatBackwardSolve()`, `MatGetFactor()`, `MatSolve()`
3833: @*/
3834: PetscErrorCode MatForwardSolve(Mat mat, Vec b, Vec x)
3835: {
3836: PetscFunctionBegin;
3841: PetscCheckSameComm(mat, 1, b, 2);
3842: PetscCheckSameComm(mat, 1, x, 3);
3843: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3844: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3845: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3846: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3847: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3848: MatCheckPreallocated(mat, 1);
3850: PetscCall(PetscLogEventBegin(MAT_ForwardSolve, mat, b, x, 0));
3851: PetscUseTypeMethod(mat, forwardsolve, b, x);
3852: PetscCall(PetscLogEventEnd(MAT_ForwardSolve, mat, b, x, 0));
3853: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3854: PetscFunctionReturn(PETSC_SUCCESS);
3855: }
3857: /*@
3858: MatBackwardSolve - Solves $U x = b$, given a factored matrix, $A = LU$, or
3859: $D^{1/2} U x = b$, given a factored symmetric matrix, $A = U^T D U$.
3861: Neighbor-wise Collective
3863: Input Parameters:
3864: + mat - the factored matrix
3865: - b - the right-hand-side vector
3867: Output Parameter:
3868: . x - the result vector
3870: Level: developer
3872: Notes:
3873: `MatSolve()` should be used for most applications, as it performs
3874: a forward solve followed by a backward solve.
3876: The vectors `b` and `x` cannot be the same. I.e., one cannot
3877: call `MatBackwardSolve`(A,x,x).
3879: For a matrix in `MATSEQBAIJ` format with block size larger than 1,
3880: the diagonal blocks are not yet implemented as $D = D^{1/2} D^{1/2}$.
3881: `MatForwardSolve()` solves $U^T D y = b$, and
3882: `MatBackwardSolve()` solves $U x = y$.
3883: Thus they do not provide a symmetric preconditioner.
3885: .seealso: [](ch_matrices), `Mat`, `MatForwardSolve()`, `MatGetFactor()`, `MatSolve()`
3886: @*/
3887: PetscErrorCode MatBackwardSolve(Mat mat, Vec b, Vec x)
3888: {
3889: PetscFunctionBegin;
3894: PetscCheckSameComm(mat, 1, b, 2);
3895: PetscCheckSameComm(mat, 1, x, 3);
3896: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3897: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3898: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3899: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3900: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3901: MatCheckPreallocated(mat, 1);
3903: PetscCall(PetscLogEventBegin(MAT_BackwardSolve, mat, b, x, 0));
3904: PetscUseTypeMethod(mat, backwardsolve, b, x);
3905: PetscCall(PetscLogEventEnd(MAT_BackwardSolve, mat, b, x, 0));
3906: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3907: PetscFunctionReturn(PETSC_SUCCESS);
3908: }
3910: /*@
3911: MatSolveAdd - Computes $x = y + A^{-1}*b$, given a factored matrix.
3913: Neighbor-wise Collective
3915: Input Parameters:
3916: + mat - the factored matrix
3917: . b - the right-hand-side vector
3918: - y - the vector to be added to
3920: Output Parameter:
3921: . x - the result vector
3923: Level: developer
3925: Note:
3926: The vectors `b` and `x` cannot be the same. I.e., one cannot
3927: call `MatSolveAdd`(A,x,y,x).
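  Example Usage:
  A minimal sketch, assuming `F` is a factored matrix and `b`, `y`, `x` are compatible `Vec`s created elsewhere
.vb
  // x = y + A^{-1} b, applied via the factors stored in F (x must differ from b)
  PetscCall(MatSolveAdd(F, b, y, x));
.ve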
3929: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolve()`, `MatGetFactor()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3930: @*/
3931: PetscErrorCode MatSolveAdd(Mat mat, Vec b, Vec y, Vec x)
3932: {
3933: PetscScalar one = 1.0;
3934: Vec tmp;
3936: PetscFunctionBegin;
3942: PetscCheckSameComm(mat, 1, b, 2);
3943: PetscCheckSameComm(mat, 1, y, 3);
3944: PetscCheckSameComm(mat, 1, x, 4);
3945: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
3946: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
3947: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
3948: PetscCheck(mat->rmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, y->map->N);
3949: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
3950: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
3951: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
3952: MatCheckPreallocated(mat, 1);
3954: PetscCall(PetscLogEventBegin(MAT_SolveAdd, mat, b, x, y));
3955: if (mat->factorerrortype) {
3956: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
3957: PetscCall(VecSetInf(x));
3958: } else if (mat->ops->solveadd) {
3959: PetscUseTypeMethod(mat, solveadd, b, y, x);
3960: } else {
3961: /* do the solve then the add manually */
3962: if (x != y) {
3963: PetscCall(MatSolve(mat, b, x));
3964: PetscCall(VecAXPY(x, one, y));
3965: } else {
3966: PetscCall(VecDuplicate(x, &tmp));
3967: PetscCall(VecCopy(x, tmp));
3968: PetscCall(MatSolve(mat, b, x));
3969: PetscCall(VecAXPY(x, one, tmp));
3970: PetscCall(VecDestroy(&tmp));
3971: }
3972: }
3973: PetscCall(PetscLogEventEnd(MAT_SolveAdd, mat, b, x, y));
3974: PetscCall(PetscObjectStateIncrease((PetscObject)x));
3975: PetscFunctionReturn(PETSC_SUCCESS);
3976: }
3978: /*@
3979: MatSolveTranspose - Solves $A^T x = b$, given a factored matrix.
3981: Neighbor-wise Collective
3983: Input Parameters:
3984: + mat - the factored matrix
3985: - b - the right-hand-side vector
3987: Output Parameter:
3988: . x - the result vector
3990: Level: developer
3992: Notes:
3993: The vectors `b` and `x` cannot be the same. I.e., one cannot
3994: call `MatSolveTranspose`(A,x,x).
3996: Most users should employ the `KSP` interface for linear solvers
3997: instead of working directly with matrix algebra routines such as this.
3998: See, e.g., `KSPCreate()`.
4000: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `KSP`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
4001: @*/
4002: PetscErrorCode MatSolveTranspose(Mat mat, Vec b, Vec x)
4003: {
4004: PetscErrorCode (*f)(Mat, Vec, Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
4006: PetscFunctionBegin;
4011: PetscCheckSameComm(mat, 1, b, 2);
4012: PetscCheckSameComm(mat, 1, x, 3);
4013: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4014: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4015: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4016: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4017: MatCheckPreallocated(mat, 1);
4018: PetscCall(PetscLogEventBegin(MAT_SolveTranspose, mat, b, x, 0));
4019: if (mat->factorerrortype) {
4020: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4021: PetscCall(VecSetInf(x));
4022: } else {
4023: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Matrix type %s", ((PetscObject)mat)->type_name);
4024: PetscCall((*f)(mat, b, x));
4025: }
4026: PetscCall(PetscLogEventEnd(MAT_SolveTranspose, mat, b, x, 0));
4027: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4028: PetscFunctionReturn(PETSC_SUCCESS);
4029: }
4031: /*@
4032: MatSolveTransposeAdd - Computes $x = y + A^{-T} b$, given a
4033: factored matrix.
4035: Neighbor-wise Collective
4037: Input Parameters:
4038: + mat - the factored matrix
4039: . b - the right-hand-side vector
4040: - y - the vector to be added to
4042: Output Parameter:
4043: . x - the result vector
4045: Level: developer
4047: Note:
4048: The vectors `b` and `x` cannot be the same. I.e., one cannot
4049: call `MatSolveTransposeAdd`(A,x,y,x).
4051: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
4052: @*/
4053: PetscErrorCode MatSolveTransposeAdd(Mat mat, Vec b, Vec y, Vec x)
4054: {
4055: PetscScalar one = 1.0;
4056: Vec tmp;
4057: PetscErrorCode (*f)(Mat, Vec, Vec, Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
4059: PetscFunctionBegin;
4065: PetscCheckSameComm(mat, 1, b, 2);
4066: PetscCheckSameComm(mat, 1, y, 3);
4067: PetscCheckSameComm(mat, 1, x, 4);
4068: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
4069: PetscCheck(mat->rmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, x->map->N);
4070: PetscCheck(mat->cmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, b->map->N);
4071: PetscCheck(mat->cmap->N == y->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, y->map->N);
4072: PetscCheck(x->map->n == y->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT, x->map->n, y->map->n);
4073: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
4074: MatCheckPreallocated(mat, 1);
4076: PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd, mat, b, x, y));
4077: if (mat->factorerrortype) {
4078: PetscCall(PetscInfo(mat, "MatFactorError %d\n", mat->factorerrortype));
4079: PetscCall(VecSetInf(x));
4080: } else if (f) {
4081: PetscCall((*f)(mat, b, y, x));
4082: } else {
4083: /* do the solve then the add manually */
4084: if (x != y) {
4085: PetscCall(MatSolveTranspose(mat, b, x));
4086: PetscCall(VecAXPY(x, one, y));
4087: } else {
4088: PetscCall(VecDuplicate(x, &tmp));
4089: PetscCall(VecCopy(x, tmp));
4090: PetscCall(MatSolveTranspose(mat, b, x));
4091: PetscCall(VecAXPY(x, one, tmp));
4092: PetscCall(VecDestroy(&tmp));
4093: }
4094: }
4095: PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd, mat, b, x, y));
4096: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4097: PetscFunctionReturn(PETSC_SUCCESS);
4098: }
4100: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
4101: /*@
4102: MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.
4104: Neighbor-wise Collective
4106: Input Parameters:
4107: + mat - the matrix
4108: . b - the right hand side
4109: . omega - the relaxation factor
4110: . flag - flag indicating the type of SOR (see below)
4111: . shift - diagonal shift
4112: . its - the number of iterations
4113: - lits - the number of local iterations
4115: Output Parameter:
4116: . x - the solution (can contain an initial guess, use option `SOR_ZERO_INITIAL_GUESS` to indicate no guess)
4118: SOR Flags:
4119: + `SOR_FORWARD_SWEEP` - forward SOR
4120: . `SOR_BACKWARD_SWEEP` - backward SOR
4121: . `SOR_SYMMETRIC_SWEEP` - SSOR (symmetric SOR)
4122: . `SOR_LOCAL_FORWARD_SWEEP` - local forward SOR
4123: . `SOR_LOCAL_BACKWARD_SWEEP` - local backward SOR
4124: . `SOR_LOCAL_SYMMETRIC_SWEEP` - local SSOR
4125: . `SOR_EISENSTAT` - SOR with Eisenstat trick
4126: . `SOR_APPLY_UPPER`, `SOR_APPLY_LOWER` - applies
4127: upper/lower triangular part of matrix to
4128: vector (with omega)
4129: - `SOR_ZERO_INITIAL_GUESS` - zero initial guess
4131: Level: developer
4133: Notes:
4134: `SOR_LOCAL_FORWARD_SWEEP`, `SOR_LOCAL_BACKWARD_SWEEP`, and
4135: `SOR_LOCAL_SYMMETRIC_SWEEP` perform separate independent smoothings
4136: on each processor.
4138: Application programmers will not generally use `MatSOR()` directly,
4139: but instead will employ the `KSP`/`PC` interface.
4141: For `MATBAIJ`, `MATSBAIJ`, and `MATAIJ` matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing
4143: Most users should employ the `KSP` interface for linear solvers
4144: instead of working directly with matrix algebra routines such as this.
4145: See, e.g., `KSPCreate()`.
4147: Vectors `x` and `b` CANNOT be the same
4149: The flags are implemented as bitwise inclusive or operations.
4150: For example, use (`SOR_ZERO_INITIAL_GUESS` | `SOR_SYMMETRIC_SWEEP`)
4151: to specify a zero initial guess for SSOR.
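  Example Usage:
  A minimal sketch of one SSOR sweep with a zero initial guess, unit relaxation factor, and no diagonal shift, assuming `A` is an assembled `MATAIJ` matrix and `b`, `x` are compatible `Vec`s created elsewhere
.vb
  PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 1, 1, x));
.ve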
4153: Developer Note:
4154: We should add block SOR support for `MATAIJ` matrices with block size set to greater than one and no inodes
4156: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `KSP`, `PC`, `MatGetFactor()`
4157: @*/
4158: PetscErrorCode MatSOR(Mat mat, Vec b, PetscReal omega, MatSORType flag, PetscReal shift, PetscInt its, PetscInt lits, Vec x)
4159: {
4160: PetscFunctionBegin;
4165: PetscCheckSameComm(mat, 1, b, 2);
4166: PetscCheckSameComm(mat, 1, x, 8);
4167: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4168: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4169: PetscCheck(mat->cmap->N == x->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->cmap->N, x->map->N);
4170: PetscCheck(mat->rmap->N == b->map->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->N, b->map->N);
4171: PetscCheck(mat->rmap->n == b->map->n, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT, mat->rmap->n, b->map->n);
4172: PetscCheck(its > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires global its %" PetscInt_FMT " positive", its);
4173: PetscCheck(lits > 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Relaxation requires local its %" PetscInt_FMT " positive", lits);
4174: PetscCheck(b != x, PETSC_COMM_SELF, PETSC_ERR_ARG_IDN, "b and x vector cannot be the same");
4176: MatCheckPreallocated(mat, 1);
4177: PetscCall(PetscLogEventBegin(MAT_SOR, mat, b, x, 0));
4178: PetscUseTypeMethod(mat, sor, b, omega, flag, shift, its, lits, x);
4179: PetscCall(PetscLogEventEnd(MAT_SOR, mat, b, x, 0));
4180: PetscCall(PetscObjectStateIncrease((PetscObject)x));
4181: PetscFunctionReturn(PETSC_SUCCESS);
4182: }
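/*
  Illustrative sketch (not part of the PETSc source): calling MatSOR() directly to
  perform two symmetric relaxation sweeps with a zero initial guess. The Mat A and
  the conforming Vecs b and x are assumed to have been created and assembled
  elsewhere; recall that b and x may not be the same vector.

    PetscCall(MatSOR(A, b, 1.0, (MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP), 0.0, 2, 1, x));

  Most applications reach this code path through KSP/PC instead, e.g. with -pc_type sor.
*/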
4184: /*
4185: Default matrix copy routine.
4186: */
4187: PetscErrorCode MatCopy_Basic(Mat A, Mat B, MatStructure str)
4188: {
4189: PetscInt i, rstart = 0, rend = 0, nz;
4190: const PetscInt *cwork;
4191: const PetscScalar *vwork;
4193: PetscFunctionBegin;
4194: if (B->assembled) PetscCall(MatZeroEntries(B));
4195: if (str == SAME_NONZERO_PATTERN) {
4196: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
4197: for (i = rstart; i < rend; i++) {
4198: PetscCall(MatGetRow(A, i, &nz, &cwork, &vwork));
4199: PetscCall(MatSetValues(B, 1, &i, nz, cwork, vwork, INSERT_VALUES));
4200: PetscCall(MatRestoreRow(A, i, &nz, &cwork, &vwork));
4201: }
4202: } else {
4203: PetscCall(MatAYPX(B, 0.0, A, str));
4204: }
4205: PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
4206: PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
4207: PetscFunctionReturn(PETSC_SUCCESS);
4208: }
4210: /*@
4211: MatCopy - Copies a matrix to another matrix.
4213: Collective
4215: Input Parameters:
4216: + A - the matrix
4217: - str - `SAME_NONZERO_PATTERN` or `DIFFERENT_NONZERO_PATTERN`
4219: Output Parameter:
4220: . B - where the copy is put
4222: Level: intermediate
4224: Notes:
4225: If you use `SAME_NONZERO_PATTERN` then the two matrices must have the same nonzero pattern or the routine will crash.
4227: `MatCopy()` copies the matrix entries of a matrix to another existing
4228: matrix (after first zeroing the second matrix). A related routine is
4229: `MatConvert()`, which first creates a new matrix and then copies the data.
4231: .seealso: [](ch_matrices), `Mat`, `MatConvert()`, `MatDuplicate()`
4232: @*/
4233: PetscErrorCode MatCopy(Mat A, Mat B, MatStructure str)
4234: {
4235: PetscInt i;
4237: PetscFunctionBegin;
4242: PetscCheckSameComm(A, 1, B, 2);
4243: MatCheckPreallocated(B, 2);
4244: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4245: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4246: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")", A->rmap->N, B->rmap->N,
4247: A->cmap->N, B->cmap->N);
4248: MatCheckPreallocated(A, 1);
4249: if (A == B) PetscFunctionReturn(PETSC_SUCCESS);
4251: PetscCall(PetscLogEventBegin(MAT_Copy, A, B, 0, 0));
4252: if (A->ops->copy) PetscUseTypeMethod(A, copy, B, str);
4253: else PetscCall(MatCopy_Basic(A, B, str));
4255: B->stencil.dim = A->stencil.dim;
4256: B->stencil.noc = A->stencil.noc;
4257: for (i = 0; i <= A->stencil.dim + (A->stencil.noc ? 0 : -1); i++) {
4258: B->stencil.dims[i] = A->stencil.dims[i];
4259: B->stencil.starts[i] = A->stencil.starts[i];
4260: }
4262: PetscCall(PetscLogEventEnd(MAT_Copy, A, B, 0, 0));
4263: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4264: PetscFunctionReturn(PETSC_SUCCESS);
4265: }
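/*
  Illustrative sketch (not part of the PETSc source): copying the values of A into a
  second matrix that is known to share its nonzero pattern. Here B is created with
  MatDuplicate(), so SAME_NONZERO_PATTERN is safe; with an unrelated B one would pass
  DIFFERENT_NONZERO_PATTERN instead.

    Mat B;
    PetscCall(MatDuplicate(A, MAT_DO_NOT_COPY_VALUES, &B));
    PetscCall(MatCopy(A, B, SAME_NONZERO_PATTERN));
    PetscCall(MatDestroy(&B));
*/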
4267: /*@C
4268: MatConvert - Converts a matrix to another matrix, either of the same
4269: or different type.
4271: Collective
4273: Input Parameters:
4274: + mat - the matrix
4275: . newtype - new matrix type. Use `MATSAME` to create a new matrix of the
4276: same type as the original matrix.
4277: - reuse - denotes if the destination matrix is to be created or reused.
4278: Use `MAT_INPLACE_MATRIX` for in-place conversion (that is, when you want the input `mat` itself to be changed so that it holds the matrix in the new format); otherwise use
4279: `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX` (the latter can only be used after a first call with `MAT_INITIAL_MATRIX` and causes the matrix space in `M` to be reused).
4281: Output Parameter:
4282: . M - pointer to place new matrix
4284: Level: intermediate
4286: Notes:
4287: `MatConvert()` first creates a new matrix and then copies the data from
4288: the first matrix. A related routine is `MatCopy()`, which copies the matrix
4289: entries of one matrix to another already existing matrix context.
4291: Cannot be used to convert a sequential matrix to a parallel one, or a parallel matrix to a sequential one;
4292: the MPI communicator of the generated matrix is always the same as the communicator
4293: of the input matrix.
4295: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatDuplicate()`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
4296: @*/
4297: PetscErrorCode MatConvert(Mat mat, MatType newtype, MatReuse reuse, Mat *M)
4298: {
4299: PetscBool sametype, issame, flg;
4300: PetscBool3 issymmetric, ishermitian;
4301: char convname[256], mtype[256];
4302: Mat B;
4304: PetscFunctionBegin;
4307: PetscAssertPointer(M, 4);
4308: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4309: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4310: MatCheckPreallocated(mat, 1);
4312: PetscCall(PetscOptionsGetString(((PetscObject)mat)->options, ((PetscObject)mat)->prefix, "-matconvert_type", mtype, sizeof(mtype), &flg));
4313: if (flg) newtype = mtype;
4315: PetscCall(PetscObjectTypeCompare((PetscObject)mat, newtype, &sametype));
4316: PetscCall(PetscStrcmp(newtype, "same", &issame));
4317: PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M), PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires same input and output matrix");
4318: if (reuse == MAT_REUSE_MATRIX) {
4320: PetscCheck(mat != *M, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");
4321: }
4323: if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
4324: PetscCall(PetscInfo(mat, "Early return for inplace %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4325: PetscFunctionReturn(PETSC_SUCCESS);
4326: }
4328: /* Cache Mat options because some converters use MatHeaderReplace */
4329: issymmetric = mat->symmetric;
4330: ishermitian = mat->hermitian;
4332: if ((sametype || issame) && (reuse == MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
4333: PetscCall(PetscInfo(mat, "Calling duplicate for initial matrix %s %d %d\n", ((PetscObject)mat)->type_name, sametype, issame));
4334: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4335: } else {
4336: PetscErrorCode (*conv)(Mat, MatType, MatReuse, Mat *) = NULL;
4337: const char *prefix[3] = {"seq", "mpi", ""};
4338: PetscInt i;
4339: /*
4340: Order of precedence:
4341: 0) See if newtype is a superclass of the current matrix.
4342: 1) See if a specialized converter is known to the current matrix.
4343: 2) See if a specialized converter is known to the desired matrix class.
4344: 3) See if a good general converter is registered for the desired class
4345: (as of 6/27/03 only MATMPIADJ falls into this category).
4346: 4) See if a good general converter is known for the current matrix.
4347: 5) Use a really basic converter.
4348: */
4350: /* 0) See if newtype is a superclass of the current matrix.
4351: i.e mat is mpiaij and newtype is aij */
4352: for (i = 0; i < 2; i++) {
4353: PetscCall(PetscStrncpy(convname, prefix[i], sizeof(convname)));
4354: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4355: PetscCall(PetscStrcmp(convname, ((PetscObject)mat)->type_name, &flg));
4356: PetscCall(PetscInfo(mat, "Check superclass %s %s -> %d\n", convname, ((PetscObject)mat)->type_name, flg));
4357: if (flg) {
4358: if (reuse == MAT_INPLACE_MATRIX) {
4359: PetscCall(PetscInfo(mat, "Early return\n"));
4360: PetscFunctionReturn(PETSC_SUCCESS);
4361: } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
4362: PetscCall(PetscInfo(mat, "Calling MatDuplicate\n"));
4363: PetscUseTypeMethod(mat, duplicate, MAT_COPY_VALUES, M);
4364: PetscFunctionReturn(PETSC_SUCCESS);
4365: } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
4366: PetscCall(PetscInfo(mat, "Calling MatCopy\n"));
4367: PetscCall(MatCopy(mat, *M, SAME_NONZERO_PATTERN));
4368: PetscFunctionReturn(PETSC_SUCCESS);
4369: }
4370: }
4371: }
4372: /* 1) See if a specialized converter is known to the current matrix and the desired class */
4373: for (i = 0; i < 3; i++) {
4374: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4375: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4376: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4377: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4378: PetscCall(PetscStrlcat(convname, issame ? ((PetscObject)mat)->type_name : newtype, sizeof(convname)));
4379: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4380: PetscCall(PetscObjectQueryFunction((PetscObject)mat, convname, &conv));
4381: PetscCall(PetscInfo(mat, "Check specialized (1) %s (%s) -> %d\n", convname, ((PetscObject)mat)->type_name, !!conv));
4382: if (conv) goto foundconv;
4383: }
4385: /* 2) See if a specialized converter is known to the desired matrix class. */
4386: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
4387: PetscCall(MatSetSizes(B, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
4388: PetscCall(MatSetType(B, newtype));
4389: for (i = 0; i < 3; i++) {
4390: PetscCall(PetscStrncpy(convname, "MatConvert_", sizeof(convname)));
4391: PetscCall(PetscStrlcat(convname, ((PetscObject)mat)->type_name, sizeof(convname)));
4392: PetscCall(PetscStrlcat(convname, "_", sizeof(convname)));
4393: PetscCall(PetscStrlcat(convname, prefix[i], sizeof(convname)));
4394: PetscCall(PetscStrlcat(convname, newtype, sizeof(convname)));
4395: PetscCall(PetscStrlcat(convname, "_C", sizeof(convname)));
4396: PetscCall(PetscObjectQueryFunction((PetscObject)B, convname, &conv));
4397: PetscCall(PetscInfo(mat, "Check specialized (2) %s (%s) -> %d\n", convname, ((PetscObject)B)->type_name, !!conv));
4398: if (conv) {
4399: PetscCall(MatDestroy(&B));
4400: goto foundconv;
4401: }
4402: }
4404: /* 3) See if a good general converter is registered for the desired class */
4405: conv = B->ops->convertfrom;
4406: PetscCall(PetscInfo(mat, "Check convertfrom (%s) -> %d\n", ((PetscObject)B)->type_name, !!conv));
4407: PetscCall(MatDestroy(&B));
4408: if (conv) goto foundconv;
4410: /* 4) See if a good general converter is known for the current matrix */
4411: if (mat->ops->convert) conv = mat->ops->convert;
4412: PetscCall(PetscInfo(mat, "Check general convert (%s) -> %d\n", ((PetscObject)mat)->type_name, !!conv));
4413: if (conv) goto foundconv;
4415: /* 5) Use a really basic converter. */
4416: PetscCall(PetscInfo(mat, "Using MatConvert_Basic\n"));
4417: conv = MatConvert_Basic;
4419: foundconv:
4420: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4421: PetscCall((*conv)(mat, newtype, reuse, M));
4422: if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
4423: /* the block sizes must be same if the mappings are copied over */
4424: (*M)->rmap->bs = mat->rmap->bs;
4425: (*M)->cmap->bs = mat->cmap->bs;
4426: PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
4427: PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
4428: (*M)->rmap->mapping = mat->rmap->mapping;
4429: (*M)->cmap->mapping = mat->cmap->mapping;
4430: }
4431: (*M)->stencil.dim = mat->stencil.dim;
4432: (*M)->stencil.noc = mat->stencil.noc;
4433: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4434: (*M)->stencil.dims[i] = mat->stencil.dims[i];
4435: (*M)->stencil.starts[i] = mat->stencil.starts[i];
4436: }
4437: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4438: }
4439: PetscCall(PetscObjectStateIncrease((PetscObject)*M));
4441: /* Copy Mat options */
4442: if (issymmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_TRUE));
4443: else if (issymmetric == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_SYMMETRIC, PETSC_FALSE));
4444: if (ishermitian == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_TRUE));
4445: else if (ishermitian == PETSC_BOOL3_FALSE) PetscCall(MatSetOption(*M, MAT_HERMITIAN, PETSC_FALSE));
4446: PetscFunctionReturn(PETSC_SUCCESS);
4447: }
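/*
  Illustrative sketch (not part of the PETSc source): converting an assembled matrix
  to a dense representation, and converting a matrix in place to MATAIJ. The matrix A
  is assumed to have been created and assembled elsewhere.

    Mat Adense;
    PetscCall(MatConvert(A, MATDENSE, MAT_INITIAL_MATRIX, &Adense)); // new matrix, A unchanged
    PetscCall(MatConvert(A, MATAIJ, MAT_INPLACE_MATRIX, &A));        // A itself changes type
    PetscCall(MatDestroy(&Adense));
*/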
4449: /*@C
4450: MatFactorGetSolverType - Returns name of the package providing the factorization routines
4452: Not Collective
4454: Input Parameter:
4455: . mat - the matrix, must be a factored matrix
4457: Output Parameter:
4458: . type - the string name of the package (do not free this string)
4460: Level: intermediate
4462: Fortran Note:
4463: Pass in an empty string and the package name will be copied into it. Make sure the string is long enough.
4465: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatSolverType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`
4466: @*/
4467: PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4468: {
4469: PetscErrorCode (*conv)(Mat, MatSolverType *);
4471: PetscFunctionBegin;
4474: PetscAssertPointer(type, 2);
4475: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
4476: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorGetSolverType_C", &conv));
4477: if (conv) PetscCall((*conv)(mat, type));
4478: else *type = MATSOLVERPETSC;
4479: PetscFunctionReturn(PETSC_SUCCESS);
4480: }
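/*
  Illustrative sketch (not part of the PETSc source): querying which solver package
  produced a factored matrix. F is assumed to be a factor matrix obtained from
  MatGetFactor() and factored with, e.g., MatLUFactorSymbolic()/MatLUFactorNumeric().

    MatSolverType stype;
    PetscCall(MatFactorGetSolverType(F, &stype));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Factorization provided by %s\n", stype));
*/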
4482: typedef struct _MatSolverTypeForSpecifcType *MatSolverTypeForSpecifcType;
4483: struct _MatSolverTypeForSpecifcType {
4484: MatType mtype;
4485: /* no entry for MAT_FACTOR_NONE */
4486: PetscErrorCode (*createfactor[MAT_FACTOR_NUM_TYPES - 1])(Mat, MatFactorType, Mat *);
4487: MatSolverTypeForSpecifcType next;
4488: };
4490: typedef struct _MatSolverTypeHolder *MatSolverTypeHolder;
4491: struct _MatSolverTypeHolder {
4492: char *name;
4493: MatSolverTypeForSpecifcType handlers;
4494: MatSolverTypeHolder next;
4495: };
4497: static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4499: /*@C
4500: MatSolverTypeRegister - Registers a `MatSolverType` that works for a particular matrix type
4502: Input Parameters:
4503: + package - name of the package, for example petsc or superlu
4504: . mtype - the matrix type that works with this package
4505: . ftype - the type of factorization supported by the package
4506: - createfactor - routine that will create the factored matrix ready to be used
4508: Level: developer
4510: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorGetSolverType()`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`,
4511: `MatGetFactor()`
4512: @*/
4513: PetscErrorCode MatSolverTypeRegister(MatSolverType package, MatType mtype, MatFactorType ftype, PetscErrorCode (*createfactor)(Mat, MatFactorType, Mat *))
4514: {
4515: MatSolverTypeHolder next = MatSolverTypeHolders, prev = NULL;
4516: PetscBool flg;
4517: MatSolverTypeForSpecifcType inext, iprev = NULL;
4519: PetscFunctionBegin;
4520: PetscCall(MatInitializePackage());
4521: if (!next) {
4522: PetscCall(PetscNew(&MatSolverTypeHolders));
4523: PetscCall(PetscStrallocpy(package, &MatSolverTypeHolders->name));
4524: PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
4525: PetscCall(PetscStrallocpy(mtype, (char **)&MatSolverTypeHolders->handlers->mtype));
4526: MatSolverTypeHolders->handlers->createfactor[(int)ftype - 1] = createfactor;
4527: PetscFunctionReturn(PETSC_SUCCESS);
4528: }
4529: while (next) {
4530: PetscCall(PetscStrcasecmp(package, next->name, &flg));
4531: if (flg) {
4532: PetscCheck(next->handlers, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatSolverTypeHolder is missing handlers");
4533: inext = next->handlers;
4534: while (inext) {
4535: PetscCall(PetscStrcasecmp(mtype, inext->mtype, &flg));
4536: if (flg) {
4537: inext->createfactor[(int)ftype - 1] = createfactor;
4538: PetscFunctionReturn(PETSC_SUCCESS);
4539: }
4540: iprev = inext;
4541: inext = inext->next;
4542: }
4543: PetscCall(PetscNew(&iprev->next));
4544: PetscCall(PetscStrallocpy(mtype, (char **)&iprev->next->mtype));
4545: iprev->next->createfactor[(int)ftype - 1] = createfactor;
4546: PetscFunctionReturn(PETSC_SUCCESS);
4547: }
4548: prev = next;
4549: next = next->next;
4550: }
4551: PetscCall(PetscNew(&prev->next));
4552: PetscCall(PetscStrallocpy(package, &prev->next->name));
4553: PetscCall(PetscNew(&prev->next->handlers));
4554: PetscCall(PetscStrallocpy(mtype, (char **)&prev->next->handlers->mtype));
4555: prev->next->handlers->createfactor[(int)ftype - 1] = createfactor;
4556: PetscFunctionReturn(PETSC_SUCCESS);
4557: }
4559: /*@C
4560: MatSolverTypeGet - Gets the function that creates the factor matrix if it exists
4562: Input Parameters:
4563: + type - name of the package, for example petsc or superlu; if this is `NULL` then the first result that satisfies the other criteria is returned
4564: . ftype - the type of factorization supported by the type
4565: - mtype - the matrix type that works with this type
4567: Output Parameters:
4568: + foundtype - `PETSC_TRUE` if the type was registered
4569: . foundmtype - `PETSC_TRUE` if the type supports the requested mtype
4570: - createfactor - routine that will create the factored matrix ready to be used or `NULL` if not found
4572: Calling sequence of `createfactor`:
4573: + A - the matrix providing the factor matrix
4574: . mtype - the `MatFactorType` of the factor requested
4575: - B - the new factor matrix that responds to MatXXFactorSymbolic,Numeric() functions, such as `MatLUFactorSymbolic()`
4577: Level: developer
4579: Note:
4580: When `type` is `NULL` the available functions are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4581: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4582: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4584: .seealso: [](ch_matrices), `Mat`, `MatFactorType`, `MatType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`,
4585: `MatInitializePackage()`
4586: @*/
4587: PetscErrorCode MatSolverTypeGet(MatSolverType type, MatType mtype, MatFactorType ftype, PetscBool *foundtype, PetscBool *foundmtype, PetscErrorCode (**createfactor)(Mat A, MatFactorType mtype, Mat *B))
4588: {
4589: MatSolverTypeHolder next = MatSolverTypeHolders;
4590: PetscBool flg;
4591: MatSolverTypeForSpecifcType inext;
4593: PetscFunctionBegin;
4594: if (foundtype) *foundtype = PETSC_FALSE;
4595: if (foundmtype) *foundmtype = PETSC_FALSE;
4596: if (createfactor) *createfactor = NULL;
4598: if (type) {
4599: while (next) {
4600: PetscCall(PetscStrcasecmp(type, next->name, &flg));
4601: if (flg) {
4602: if (foundtype) *foundtype = PETSC_TRUE;
4603: inext = next->handlers;
4604: while (inext) {
4605: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4606: if (flg) {
4607: if (foundmtype) *foundmtype = PETSC_TRUE;
4608: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4609: PetscFunctionReturn(PETSC_SUCCESS);
4610: }
4611: inext = inext->next;
4612: }
4613: }
4614: next = next->next;
4615: }
4616: } else {
4617: while (next) {
4618: inext = next->handlers;
4619: while (inext) {
4620: PetscCall(PetscStrcmp(mtype, inext->mtype, &flg));
4621: if (flg && inext->createfactor[(int)ftype - 1]) {
4622: if (foundtype) *foundtype = PETSC_TRUE;
4623: if (foundmtype) *foundmtype = PETSC_TRUE;
4624: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4625: PetscFunctionReturn(PETSC_SUCCESS);
4626: }
4627: inext = inext->next;
4628: }
4629: next = next->next;
4630: }
4631: /* try with base classes inext->mtype */
4632: next = MatSolverTypeHolders;
4633: while (next) {
4634: inext = next->handlers;
4635: while (inext) {
4636: PetscCall(PetscStrbeginswith(mtype, inext->mtype, &flg));
4637: if (flg && inext->createfactor[(int)ftype - 1]) {
4638: if (foundtype) *foundtype = PETSC_TRUE;
4639: if (foundmtype) *foundmtype = PETSC_TRUE;
4640: if (createfactor) *createfactor = inext->createfactor[(int)ftype - 1];
4641: PetscFunctionReturn(PETSC_SUCCESS);
4642: }
4643: inext = inext->next;
4644: }
4645: next = next->next;
4646: }
4647: }
4648: PetscFunctionReturn(PETSC_SUCCESS);
4649: }
4651: PetscErrorCode MatSolverTypeDestroy(void)
4652: {
4653: MatSolverTypeHolder next = MatSolverTypeHolders, prev;
4654: MatSolverTypeForSpecifcType inext, iprev;
4656: PetscFunctionBegin;
4657: while (next) {
4658: PetscCall(PetscFree(next->name));
4659: inext = next->handlers;
4660: while (inext) {
4661: PetscCall(PetscFree(inext->mtype));
4662: iprev = inext;
4663: inext = inext->next;
4664: PetscCall(PetscFree(iprev));
4665: }
4666: prev = next;
4667: next = next->next;
4668: PetscCall(PetscFree(prev));
4669: }
4670: MatSolverTypeHolders = NULL;
4671: PetscFunctionReturn(PETSC_SUCCESS);
4672: }
4674: /*@C
4675: MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4677: Logically Collective
4679: Input Parameter:
4680: . mat - the matrix
4682: Output Parameter:
4683: . flg - `PETSC_TRUE` if the factorization uses the provided ordering
4685: Level: developer
4687: Note:
4688: Most internal PETSc factorizations use the ordering passed to the factorization routine, but external
4689: packages do not; thus we want to skip generating the ordering when it is not needed or used.
4691: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4692: @*/
4693: PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4694: {
4695: PetscFunctionBegin;
4696: *flg = mat->canuseordering;
4697: PetscFunctionReturn(PETSC_SUCCESS);
4698: }
4700: /*@C
4701: MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4703: Logically Collective
4705: Input Parameters:
4706: + mat - the matrix obtained with `MatGetFactor()`
4707: - ftype - the factorization type to be used
4709: Output Parameter:
4710: . otype - the preferred ordering type
4712: Level: developer
4714: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatFactorType`, `MatOrderingType`, `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4715: @*/
4716: PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4717: {
4718: PetscFunctionBegin;
4719: *otype = mat->preferredordering[ftype];
4720: PetscCheck(*otype, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MatFactor did not have a preferred ordering");
4721: PetscFunctionReturn(PETSC_SUCCESS);
4722: }
4724: /*@C
4725: MatGetFactor - Returns a matrix suitable for calls to MatXXFactorSymbolic,Numeric()
4727: Collective
4729: Input Parameters:
4730: + mat - the matrix
4731: . type - name of solver type, for example, superlu, petsc (to use PETSc's solver if it is available); if this is `NULL` then the first result that satisfies
4732: the other criteria is returned
4733: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4735: Output Parameter:
4736: . f - the factor matrix used with MatXXFactorSymbolic,Numeric() calls. Can be `NULL` in some cases, see notes below.
4738: Options Database Keys:
4739: + -pc_factor_mat_solver_type <type> - choose the solver type at run time when using `KSP` solvers
4740: - -mat_factor_bind_factorization <host, device> - where to perform the matrix factorization. The default is device, which may consume more device memory;
4741: one can choose host to save device memory. Currently only supported with `MATSEQAIJCUSPARSE` matrices.
4743: Level: intermediate
4745: Notes:
4746: The returned matrix can be `NULL` if the requested factorization is not available, since some combinations of matrix types and factorization
4747: types registered with `MatSolverTypeRegister()` cannot be fully tested until runtime.
4749: Users usually access the factorization solvers via `KSP`
4751: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4752: such as pastix, superlu, mumps etc. PETSc must have been configured (./configure) to use the external solver, using the option --download-package or --with-package-dir
4754: When `type` is `NULL` the available results are searched for based on the order of the calls to `MatSolverTypeRegister()` in `MatInitializePackage()`.
4755: Since different PETSc configurations may have different external solvers, seemingly identical runs with different PETSc configurations may use a different solver.
4756: For example if one configuration had --download-mumps while a different one had --download-superlu_dist.
4758: Some of the packages have options for controlling the factorization; these are in the form -prefix_mat_packagename_packageoption
4759: where prefix is normally obtained from the calling `KSP`/`PC`. If `MatGetFactor()` is called directly, one can
4760: call `MatSetOptionsPrefixFactor()` on the originating matrix or `MatSetOptionsPrefix()` on the resulting factor matrix.
4762: Developer Note:
4763: This should actually be called `MatCreateFactor()` since it creates a new factor object
4765: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `KSP`, `MatSolverType`, `MatFactorType`, `MatCopy()`, `MatDuplicate()`,
4766: `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`, `MatSolverTypeGet()`
4767: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatInitializePackage()`
4768: @*/
4769: PetscErrorCode MatGetFactor(Mat mat, MatSolverType type, MatFactorType ftype, Mat *f)
4770: {
4771: PetscBool foundtype, foundmtype;
4772: PetscErrorCode (*conv)(Mat, MatFactorType, Mat *);
4774: PetscFunctionBegin;
4778: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4779: MatCheckPreallocated(mat, 1);
4781: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, &foundtype, &foundmtype, &conv));
4782: if (!foundtype) {
4783: if (type) {
4784: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s", type, MatFactorTypes[ftype],
4785: ((PetscObject)mat)->type_name, type);
4786: } else {
4787: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "Could not locate a solver type for factorization type %s and matrix type %s.", MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4788: }
4789: }
4790: PetscCheck(foundmtype, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support matrix type %s", type, ((PetscObject)mat)->type_name);
4791: PetscCheck(conv, PetscObjectComm((PetscObject)mat), PETSC_ERR_MISSING_FACTOR, "MatSolverType %s does not support factorization type %s for matrix type %s", type, MatFactorTypes[ftype], ((PetscObject)mat)->type_name);
4793: PetscCall((*conv)(mat, ftype, f));
4794: if (mat->factorprefix) PetscCall(MatSetOptionsPrefix(*f, mat->factorprefix));
4795: PetscFunctionReturn(PETSC_SUCCESS);
4796: }
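/*
  Illustrative sketch (not part of the PETSc source): the typical low-level sequence of
  MatGetFactor() followed by symbolic and numeric factorization and a solve. A, b and x
  are assumed to exist; MATSOLVERPETSC and MATORDERINGND are used here, but any
  registered solver type and ordering could be substituted.

    Mat           F;
    IS            rperm, cperm;
    MatFactorInfo info;

    PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_LU, &F));
    PetscCall(MatGetOrdering(A, MATORDERINGND, &rperm, &cperm));
    PetscCall(MatFactorInfoInitialize(&info));
    PetscCall(MatLUFactorSymbolic(F, A, rperm, cperm, &info));
    PetscCall(MatLUFactorNumeric(F, A, &info));
    PetscCall(MatSolve(F, b, x));
    PetscCall(ISDestroy(&rperm));
    PetscCall(ISDestroy(&cperm));
    PetscCall(MatDestroy(&F));

  Most users obtain this functionality through KSP, e.g. with -ksp_type preonly -pc_type lu.
*/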
4798: /*@C
4799: MatGetFactorAvailable - Returns a flag indicating whether the matrix supports a particular solver type and factor type
4801: Not Collective
4803: Input Parameters:
4804: + mat - the matrix
4805: . type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4806: - ftype - factor type, `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`
4808: Output Parameter:
4809: . flg - `PETSC_TRUE` if the factorization is available
4811: Level: intermediate
4813: Notes:
4814: Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4815: such as pastix, superlu, mumps etc.
4817: PETSc must have been configured (./configure) to use the external solver, using the option --download-package
4819: Developer Note:
4820: This should actually be called `MatCreateFactorAvailable()` since `MatGetFactor()` creates a new factor object
4822: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatSolverType`, `MatFactorType`, `MatGetFactor()`, `MatCopy()`, `MatDuplicate()`, `MatSolverTypeRegister()`,
4823: `MAT_FACTOR_LU`, `MAT_FACTOR_CHOLESKY`, `MAT_FACTOR_ICC`, `MAT_FACTOR_ILU`, `MAT_FACTOR_QR`, `MatSolverTypeGet()`
4824: @*/
4825: PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type, MatFactorType ftype, PetscBool *flg)
4826: {
4827: PetscErrorCode (*gconv)(Mat, MatFactorType, Mat *);
4829: PetscFunctionBegin;
4832: PetscAssertPointer(flg, 4);
4834: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4835: MatCheckPreallocated(mat, 1);
4837: PetscCall(MatSolverTypeGet(type, ((PetscObject)mat)->type_name, ftype, NULL, NULL, &gconv));
4838: *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4839: PetscFunctionReturn(PETSC_SUCCESS);
4840: }
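/*
  Illustrative sketch (not part of the PETSc source): checking at run time whether a
  Cholesky factorization from MUMPS is available for A before requesting it. This only
  succeeds if PETSc was configured with MUMPS support.

    PetscBool flg;
    PetscCall(MatGetFactorAvailable(A, MATSOLVERMUMPS, MAT_FACTOR_CHOLESKY, &flg));
    if (flg) {
      Mat F;
      PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_CHOLESKY, &F));
      // ... MatCholeskyFactorSymbolic()/Numeric() and MatSolve() as above ...
      PetscCall(MatDestroy(&F));
    }
*/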
4842: /*@
4843: MatDuplicate - Duplicates a matrix including the non-zero structure.
4845: Collective
4847: Input Parameters:
4848: + mat - the matrix
4849: - op - One of `MAT_DO_NOT_COPY_VALUES`, `MAT_COPY_VALUES`, or `MAT_SHARE_NONZERO_PATTERN`.
4850: See the manual page for `MatDuplicateOption()` for an explanation of these options.
4852: Output Parameter:
4853: . M - pointer to place new matrix
4855: Level: intermediate
4857: Notes:
4858: You cannot change the nonzero pattern for the parent or child matrix later if you use `MAT_SHARE_NONZERO_PATTERN`.
4860: If `op` is not `MAT_COPY_VALUES` the numerical values in the new matrix are zeroed.
4862: May be called with an unassembled input `Mat` if `MAT_DO_NOT_COPY_VALUES` is used, in which case the output `Mat` is unassembled as well.
4864: When the original `mat` is the product of a matrix operation, e.g., the output of `MatMatMult()` or `MatCreateSubMatrix()`, only the matrix data structure of `mat`
4865: is duplicated; the internal data structures created for the reuse of previous matrix operations are not duplicated.
4866: Users should not use `MatDuplicate()` to create a new matrix `M` if `M` is intended to be reused as the product of a matrix operation.
4868: .seealso: [](ch_matrices), `Mat`, `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4869: @*/
4870: PetscErrorCode MatDuplicate(Mat mat, MatDuplicateOption op, Mat *M)
4871: {
4872: Mat B;
4873: VecType vtype;
4874: PetscInt i;
4875: PetscObject dm, container_h, container_d;
4876: void (*viewf)(void);
4878: PetscFunctionBegin;
4881: PetscAssertPointer(M, 3);
4882: PetscCheck(op != MAT_COPY_VALUES || mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "MAT_COPY_VALUES not allowed for unassembled matrix");
4883: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
4884: MatCheckPreallocated(mat, 1);
4886: *M = NULL;
4887: PetscCall(PetscLogEventBegin(MAT_Convert, mat, 0, 0, 0));
4888: PetscUseTypeMethod(mat, duplicate, op, M);
4889: PetscCall(PetscLogEventEnd(MAT_Convert, mat, 0, 0, 0));
4890: B = *M;
4892: PetscCall(MatGetOperation(mat, MATOP_VIEW, &viewf));
4893: if (viewf) PetscCall(MatSetOperation(B, MATOP_VIEW, viewf));
4894: PetscCall(MatGetVecType(mat, &vtype));
4895: PetscCall(MatSetVecType(B, vtype));
4897: B->stencil.dim = mat->stencil.dim;
4898: B->stencil.noc = mat->stencil.noc;
4899: for (i = 0; i <= mat->stencil.dim + (mat->stencil.noc ? 0 : -1); i++) {
4900: B->stencil.dims[i] = mat->stencil.dims[i];
4901: B->stencil.starts[i] = mat->stencil.starts[i];
4902: }
4904: B->nooffproczerorows = mat->nooffproczerorows;
4905: B->nooffprocentries = mat->nooffprocentries;
4907: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_dm", &dm));
4908: if (dm) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_dm", dm));
4909: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Host", &container_h));
4910: if (container_h) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Host", container_h));
4911: PetscCall(PetscObjectQuery((PetscObject)mat, "__PETSc_MatCOOStruct_Device", &container_d));
4912: if (container_d) PetscCall(PetscObjectCompose((PetscObject)B, "__PETSc_MatCOOStruct_Device", container_d));
4913: PetscCall(PetscObjectStateIncrease((PetscObject)B));
4914: PetscFunctionReturn(PETSC_SUCCESS);
4915: }
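/*
  Illustrative sketch (not part of the PETSc source): duplicating a matrix including
  its values and then modifying only the copy, leaving A untouched.

    Mat B;
    PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B));
    PetscCall(MatScale(B, 2.0));  // B = 2*A, A is unchanged
    PetscCall(MatDestroy(&B));
*/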
4917: /*@
4918: MatGetDiagonal - Gets the diagonal of a matrix as a `Vec`
4920: Logically Collective
4922: Input Parameter:
4923: . mat - the matrix
4925: Output Parameter:
4926: . v - the diagonal of the matrix
4928: Level: intermediate
4930: Note:
4931: If `mat` has local sizes `n` x `m`, this routine fills the first `ndiag = min(n, m)` entries
4932: of `v` with the diagonal values. Thus `v` must have local size of at least `ndiag`. If `v`
4933: is larger than `ndiag`, the values of the remaining entries are unspecified.
4935: Currently only correct in parallel for square matrices.
4937: .seealso: [](ch_matrices), `Mat`, `Vec`, `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4938: @*/
4939: PetscErrorCode MatGetDiagonal(Mat mat, Vec v)
4940: {
4941: PetscFunctionBegin;
4945: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4946: MatCheckPreallocated(mat, 1);
4947: if (PetscDefined(USE_DEBUG)) {
4948: PetscInt nv, row, col, ndiag;
4950: PetscCall(VecGetLocalSize(v, &nv));
4951: PetscCall(MatGetLocalSize(mat, &row, &col));
4952: ndiag = PetscMin(row, col);
4953: PetscCheck(nv >= ndiag, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Nonconforming Mat and Vec. Vec local size %" PetscInt_FMT " < Mat local diagonal length %" PetscInt_FMT, nv, ndiag);
4954: }
4956: PetscUseTypeMethod(mat, getdiagonal, v);
4957: PetscCall(PetscObjectStateIncrease((PetscObject)v));
4958: PetscFunctionReturn(PETSC_SUCCESS);
4959: }
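/*
  Illustrative sketch (not part of the PETSc source): extracting the diagonal of an
  assembled square matrix into a conforming vector created with MatCreateVecs().

    Vec diag;
    PetscCall(MatCreateVecs(A, NULL, &diag));  // layout compatible with the rows of A
    PetscCall(MatGetDiagonal(A, diag));
    PetscCall(VecDestroy(&diag));
*/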
4961: /*@C
4962: MatGetRowMin - Gets the minimum value (of the real part) of each
4963: row of the matrix
4965: Logically Collective
4967: Input Parameter:
4968: . mat - the matrix
4970: Output Parameters:
4971: + v - the vector for storing the minimums
4972: - idx - the indices of the column found for each row (optional)
4974: Level: intermediate
4976: Note:
4977: The result of this call is the same as if one converted the matrix to dense format
4978: and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4980: This code is only implemented for a couple of matrix formats.
4982: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`,
4983: `MatGetRowMax()`
4984: @*/
4985: PetscErrorCode MatGetRowMin(Mat mat, Vec v, PetscInt idx[])
4986: {
4987: PetscFunctionBegin;
4991: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
4993: if (!mat->cmap->N) {
4994: PetscCall(VecSet(v, PETSC_MAX_REAL));
4995: if (idx) {
4996: PetscInt i, m = mat->rmap->n;
4997: for (i = 0; i < m; i++) idx[i] = -1;
4998: }
4999: } else {
5000: MatCheckPreallocated(mat, 1);
5001: }
5002: PetscUseTypeMethod(mat, getrowmin, v, idx);
5003: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5004: PetscFunctionReturn(PETSC_SUCCESS);
5005: }
5007: /*@C
5008: MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
5009: row of the matrix
5011: Logically Collective
5013: Input Parameter:
5014: . mat - the matrix
5016: Output Parameters:
5017: + v - the vector for storing the minimums
5018: - idx - the indices of the column found for each row (or `NULL` if not needed)
5020: Level: intermediate
5022: Notes:
5023: If a row is completely empty or has only 0.0 values then the `idx` value for that
5024: row is 0 (the first column).
5026: This code is only implemented for a couple of matrix formats.
5028: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
5029: @*/
5030: PetscErrorCode MatGetRowMinAbs(Mat mat, Vec v, PetscInt idx[])
5031: {
5032: PetscFunctionBegin;
5036: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5037: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5039: if (!mat->cmap->N) {
5040: PetscCall(VecSet(v, 0.0));
5041: if (idx) {
5042: PetscInt i, m = mat->rmap->n;
5043: for (i = 0; i < m; i++) idx[i] = -1;
5044: }
5045: } else {
5046: MatCheckPreallocated(mat, 1);
5047: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5048: PetscUseTypeMethod(mat, getrowminabs, v, idx);
5049: }
5050: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5051: PetscFunctionReturn(PETSC_SUCCESS);
5052: }
5054: /*@C
5055: MatGetRowMax - Gets the maximum value (of the real part) of each
5056: row of the matrix
5058: Logically Collective
5060: Input Parameter:
5061: . mat - the matrix
5063: Output Parameters:
5064: + v - the vector for storing the maximums
5065: - idx - the indices of the column found for each row (optional)
5067: Level: intermediate
5069: Notes:
5070: The result of this call is the same as if one converted the matrix to dense format
5071: and found the maximum value in each row (i.e. the implicit zeros are counted as zeros).
5073: This code is only implemented for a couple of matrix formats.
5075: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5076: @*/
5077: PetscErrorCode MatGetRowMax(Mat mat, Vec v, PetscInt idx[])
5078: {
5079: PetscFunctionBegin;
5083: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5085: if (!mat->cmap->N) {
5086: PetscCall(VecSet(v, PETSC_MIN_REAL));
5087: if (idx) {
5088: PetscInt i, m = mat->rmap->n;
5089: for (i = 0; i < m; i++) idx[i] = -1;
5090: }
5091: } else {
5092: MatCheckPreallocated(mat, 1);
5093: PetscUseTypeMethod(mat, getrowmax, v, idx);
5094: }
5095: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5096: PetscFunctionReturn(PETSC_SUCCESS);
5097: }
5099: /*@C
5100: MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
5101: row of the matrix
5103: Logically Collective
5105: Input Parameter:
5106: . mat - the matrix
5108: Output Parameters:
5109: + v - the vector for storing the maximums
5110: - idx - the indices of the column found for each row (or `NULL` if not needed)
5112: Level: intermediate
5114: Notes:
5115: if a row is completely empty or has only 0.0 values then the `idx` value for that
5116: row is 0 (the first column).
5118: This code is only implemented for a couple of matrix formats.
5120: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMinAbs()`
5121: @*/
5122: PetscErrorCode MatGetRowMaxAbs(Mat mat, Vec v, PetscInt idx[])
5123: {
5124: PetscFunctionBegin;
5128: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5130: if (!mat->cmap->N) {
5131: PetscCall(VecSet(v, 0.0));
5132: if (idx) {
5133: PetscInt i, m = mat->rmap->n;
5134: for (i = 0; i < m; i++) idx[i] = -1;
5135: }
5136: } else {
5137: MatCheckPreallocated(mat, 1);
5138: if (idx) PetscCall(PetscArrayzero(idx, mat->rmap->n));
5139: PetscUseTypeMethod(mat, getrowmaxabs, v, idx);
5140: }
5141: PetscCall(PetscObjectStateIncrease((PetscObject)v));
5142: PetscFunctionReturn(PETSC_SUCCESS);
5143: }
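/*
  Illustrative sketch (not part of the PETSc source): computing the largest entry in
  absolute value of each row; the same pattern applies to MatGetRowMax(), MatGetRowMin()
  and MatGetRowMinAbs(). Pass NULL as the last argument when the column indices of the
  extreme entries are not needed.

    Vec rmax;
    PetscCall(MatCreateVecs(A, NULL, &rmax));  // one entry per local row
    PetscCall(MatGetRowMaxAbs(A, rmax, NULL));
    PetscCall(VecDestroy(&rmax));
*/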
5145: /*@
5146: MatGetRowSum - Gets the sum of each row of the matrix
5148: Logically or Neighborhood Collective
5150: Input Parameter:
5151: . mat - the matrix
5153: Output Parameter:
5154: . v - the vector for storing the sum of rows
5156: Level: intermediate
5158: Note:
5159: This code is slow since it is not currently specialized for different formats
5161: .seealso: [](ch_matrices), `Mat`, `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`, `MatGetRowMaxAbs()`, `MatGetRowMinAbs()`
5162: @*/
5163: PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5164: {
5165: Vec ones;
5167: PetscFunctionBegin;
5171: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5172: MatCheckPreallocated(mat, 1);
5173: PetscCall(MatCreateVecs(mat, &ones, NULL));
5174: PetscCall(VecSet(ones, 1.));
5175: PetscCall(MatMult(mat, ones, v));
5176: PetscCall(VecDestroy(&ones));
5177: PetscFunctionReturn(PETSC_SUCCESS);
5178: }
5180: /*@
5181: MatTransposeSetPrecursor - Set the matrix from which the second matrix will receive numerical transpose data with a call to `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B)
5182: when B was not obtained with `MatTranspose`(A,`MAT_INITIAL_MATRIX`,&B)
5184: Collective
5186: Input Parameter:
5187: . mat - the matrix to provide the transpose
5189: Output Parameter:
5190: . B - the matrix to contain the transpose; it MUST have the nonzero structure of the transpose of A or the code will crash or generate incorrect results
5192: Level: advanced
5194: Note:
5195: Normally the use of `MatTranspose`(A, `MAT_REUSE_MATRIX`, &B) requires that `B` was obtained with a call to `MatTranspose`(A, `MAT_INITIAL_MATRIX`, &B). This
5196: routine allows bypassing that call.
5198: .seealso: [](ch_matrices), `Mat`, `MatTransposeSymbolic()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5199: @*/
5200: PetscErrorCode MatTransposeSetPrecursor(Mat mat, Mat B)
5201: {
5202: PetscContainer rB = NULL;
5203: MatParentState *rb = NULL;
5205: PetscFunctionBegin;
5206: PetscCall(PetscNew(&rb));
5207: rb->id = ((PetscObject)mat)->id;
5208: rb->state = 0;
5209: PetscCall(MatGetNonzeroState(mat, &rb->nonzerostate));
5210: PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)B), &rB));
5211: PetscCall(PetscContainerSetPointer(rB, rb));
5212: PetscCall(PetscContainerSetUserDestroy(rB, PetscContainerUserDestroyDefault));
5213: PetscCall(PetscObjectCompose((PetscObject)B, "MatTransposeParent", (PetscObject)rB));
5214: PetscCall(PetscObjectDereference((PetscObject)rB));
5215: PetscFunctionReturn(PETSC_SUCCESS);
5216: }
5218: /*@
5219: MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5221: Collective
5223: Input Parameters:
5224: + mat - the matrix to transpose
5225: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5227: Output Parameter:
5228: . B - the transpose
5230: Level: intermediate
5232: Notes:
5233: If you use `MAT_INPLACE_MATRIX` then you must pass in `&mat` for `B`
5235: `MAT_REUSE_MATRIX` uses the `B` matrix obtained from a previous call to this function with `MAT_INITIAL_MATRIX`. If you already have a matrix to contain the
5236: transpose, call `MatTransposeSetPrecursor(mat, B)` before calling this routine.
5238: If the nonzero structure of `mat` changed from the previous call to this function with the same matrices, an error will be generated for some matrix types.
5240: Consider using `MatCreateTranspose()` instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5242: If mat is unchanged from the last call this function returns immediately without recomputing the result
5244: If you only need the symbolic transpose, and not the numerical values, use `MatTransposeSymbolic()`
5246: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`,
5247: `MatTransposeSymbolic()`, `MatCreateTranspose()`
5248: @*/
5249: PetscErrorCode MatTranspose(Mat mat, MatReuse reuse, Mat *B)
5250: {
5251: PetscContainer rB = NULL;
5252: MatParentState *rb = NULL;
5254: PetscFunctionBegin;
5257: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5258: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5259: PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX requires last matrix to match first");
5260: PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Perhaps you mean MAT_INPLACE_MATRIX");
5261: MatCheckPreallocated(mat, 1);
5262: if (reuse == MAT_REUSE_MATRIX) {
5263: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5264: PetscCheck(rB, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose(). Suggest MatTransposeSetPrecursor().");
5265: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5266: PetscCheck(rb->id == ((PetscObject)mat)->id, PetscObjectComm((PetscObject)*B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5267: if (rb->state == ((PetscObject)mat)->state) PetscFunctionReturn(PETSC_SUCCESS);
5268: }
5270: PetscCall(PetscLogEventBegin(MAT_Transpose, mat, 0, 0, 0));
5271: if (reuse != MAT_INPLACE_MATRIX || mat->symmetric != PETSC_BOOL3_TRUE) {
5272: PetscUseTypeMethod(mat, transpose, reuse, B);
5273: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5274: }
5275: PetscCall(PetscLogEventEnd(MAT_Transpose, mat, 0, 0, 0));
5277: if (reuse == MAT_INITIAL_MATRIX) PetscCall(MatTransposeSetPrecursor(mat, *B));
5278: if (reuse != MAT_INPLACE_MATRIX) {
5279: PetscCall(PetscObjectQuery((PetscObject)*B, "MatTransposeParent", (PetscObject *)&rB));
5280: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5281: rb->state = ((PetscObject)mat)->state;
5282: rb->nonzerostate = mat->nonzerostate;
5283: }
5284: PetscFunctionReturn(PETSC_SUCCESS);
5285: }
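/*
  Illustrative sketch (not part of the PETSc source): computing an explicit transpose
  and refreshing it after the numerical values of A change (with the nonzero pattern
  unchanged) via MAT_REUSE_MATRIX.

    Mat At;
    PetscCall(MatTranspose(A, MAT_INITIAL_MATRIX, &At));
    // ... change the numerical values of A, keeping its nonzero structure ...
    PetscCall(MatTranspose(A, MAT_REUSE_MATRIX, &At));
    PetscCall(MatDestroy(&At));
*/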
5287: /*@
5288: MatTransposeSymbolic - Computes the symbolic part of the transpose of a matrix.
5290: Collective
5292: Input Parameter:
5293: . A - the matrix to transpose
5295: Output Parameter:
5296: . B - the transpose. This is a complete matrix but the numerical portion is invalid. One can call `MatTranspose`(A,`MAT_REUSE_MATRIX`,&B) to compute the
5297: numerical portion.
5299: Level: intermediate
5301: Note:
5302: This is not supported for many matrix types, use `MatTranspose()` in those cases
5304: .seealso: [](ch_matrices), `Mat`, `MatTransposeSetPrecursor()`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`, `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, `MAT_INPLACE_MATRIX`
5305: @*/
5306: PetscErrorCode MatTransposeSymbolic(Mat A, Mat *B)
5307: {
5308: PetscFunctionBegin;
5311: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5312: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5313: PetscCall(PetscLogEventBegin(MAT_Transpose, A, 0, 0, 0));
5314: PetscUseTypeMethod(A, transposesymbolic, B);
5315: PetscCall(PetscLogEventEnd(MAT_Transpose, A, 0, 0, 0));
5317: PetscCall(MatTransposeSetPrecursor(A, *B));
5318: PetscFunctionReturn(PETSC_SUCCESS);
5319: }
5321: PetscErrorCode MatTransposeCheckNonzeroState_Private(Mat A, Mat B)
5322: {
5323: PetscContainer rB;
5324: MatParentState *rb;
5326: PetscFunctionBegin;
5329: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5330: PetscCheck(!A->factortype, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5331: PetscCall(PetscObjectQuery((PetscObject)B, "MatTransposeParent", (PetscObject *)&rB));
5332: PetscCheck(rB, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from call to MatTranspose()");
5333: PetscCall(PetscContainerGetPointer(rB, (void **)&rb));
5334: PetscCheck(rb->id == ((PetscObject)A)->id, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONG, "Reuse matrix used was not generated from input matrix");
5335: PetscCheck(rb->nonzerostate == A->nonzerostate, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Reuse matrix has changed nonzero structure");
5336: PetscFunctionReturn(PETSC_SUCCESS);
5337: }
5339: /*@
5340: MatIsTranspose - Test whether a matrix is another one's transpose,
5341: or its own, in which case it tests symmetry.
5343: Collective
5345: Input Parameters:
5346: + A - the matrix to test
5347: . B - the matrix to test against, this can equal the first parameter
5348: - tol - tolerance, differences between entries smaller than this are counted as zero
5350: Output Parameter:
5351: . flg - the result
5353: Level: intermediate
5355: Notes:
5356: Only available for `MATAIJ` matrices.
5358: The sequential algorithm has a running time of the order of the number of nonzeros; the parallel
5359: test involves parallel copies of the block off-diagonal parts of the matrix.
5361: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
5362: @*/
5363: PetscErrorCode MatIsTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5364: {
5365: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5367: PetscFunctionBegin;
5370: PetscAssertPointer(flg, 4);
5371: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsTranspose_C", &f));
5372: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsTranspose_C", &g));
5373: *flg = PETSC_FALSE;
5374: if (f && g) {
5375: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for symmetry test");
5376: PetscCall((*f)(A, B, tol, flg));
5377: } else {
5378: MatType mattype;
5380: PetscCall(MatGetType(f ? B : A, &mattype));
5381: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Matrix of type %s does not support checking for transpose", mattype);
5382: }
5383: PetscFunctionReturn(PETSC_SUCCESS);
5384: }
5386: /*@
5387: MatHermitianTranspose - Computes an in-place or out-of-place Hermitian (complex conjugate) transpose of a matrix.
5389: Collective
5391: Input Parameters:
5392: + mat - the matrix to transpose and complex conjugate
5393: - reuse - either `MAT_INITIAL_MATRIX`, `MAT_REUSE_MATRIX`, or `MAT_INPLACE_MATRIX`
5395: Output Parameter:
5396: . B - the Hermitian transpose
5398: Level: intermediate
5400: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5401: @*/
5402: PetscErrorCode MatHermitianTranspose(Mat mat, MatReuse reuse, Mat *B)
5403: {
5404: PetscFunctionBegin;
5405: PetscCall(MatTranspose(mat, reuse, B));
5406: #if defined(PETSC_USE_COMPLEX)
5407: PetscCall(MatConjugate(*B));
5408: #endif
5409: PetscFunctionReturn(PETSC_SUCCESS);
5410: }
5412: /*@
5413: MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose.
5415: Collective
5417: Input Parameters:
5418: + A - the matrix to test
5419: . B - the matrix to test against, this can equal the first parameter
5420: - tol - tolerance, differences between entries smaller than this are counted as zero
5422: Output Parameter:
5423: . flg - the result
5425: Level: intermediate
5427: Notes:
5428: Only available for `MATAIJ` matrices.
5430: The sequential algorithm
5431: has a running time of the order of the number of nonzeros; the parallel
5432: test involves parallel copies of the block off-diagonal parts of the matrix.
5434: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5435: @*/
5436: PetscErrorCode MatIsHermitianTranspose(Mat A, Mat B, PetscReal tol, PetscBool *flg)
5437: {
5438: PetscErrorCode (*f)(Mat, Mat, PetscReal, PetscBool *), (*g)(Mat, Mat, PetscReal, PetscBool *);
5440: PetscFunctionBegin;
5443: PetscAssertPointer(flg, 4);
5444: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatIsHermitianTranspose_C", &f));
5445: PetscCall(PetscObjectQueryFunction((PetscObject)B, "MatIsHermitianTranspose_C", &g));
5446: if (f && g) {
5447: PetscCheck(f == g, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_NOTSAMETYPE, "Matrices do not have the same comparator for Hermitian test");
5448: PetscCall((*f)(A, B, tol, flg));
5449: }
5450: PetscFunctionReturn(PETSC_SUCCESS);
5451: }
5453: /*@
5454: MatPermute - Creates a new matrix with rows and columns permuted from the
5455: original.
5457: Collective
5459: Input Parameters:
5460: + mat - the matrix to permute
5461: . row - row permutation, each processor supplies only the permutation for its rows
5462: - col - column permutation, each processor supplies only the permutation for its columns
5464: Output Parameter:
5465: . B - the permuted matrix
5467: Level: advanced
5469: Note:
5470: The index sets map from row/col of permuted matrix to row/col of original matrix.
5471: The index sets should be on the same communicator as mat and have the same local sizes.
5473: Developer Note:
5474: If you want to implement `MatPermute()` for a matrix type, and your approach doesn't
5475: exploit the fact that row and col are permutations, consider implementing the
5476: more general `MatCreateSubMatrix()` instead.
5478: .seealso: [](ch_matrices), `Mat`, `MatGetOrdering()`, `ISAllGather()`, `MatCreateSubMatrix()`
5479: @*/
5480: PetscErrorCode MatPermute(Mat mat, IS row, IS col, Mat *B)
5481: {
5482: PetscFunctionBegin;
5487: PetscAssertPointer(B, 4);
5488: PetscCheckSameComm(mat, 1, row, 2);
5489: if (row != col) PetscCheckSameComm(row, 2, col, 3);
5490: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5491: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5492: PetscCheck(mat->ops->permute || mat->ops->createsubmatrix, PETSC_COMM_SELF, PETSC_ERR_SUP, "MatPermute not available for Mat type %s", ((PetscObject)mat)->type_name);
5493: MatCheckPreallocated(mat, 1);
5495: if (mat->ops->permute) {
5496: PetscUseTypeMethod(mat, permute, row, col, B);
5497: PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5498: } else {
5499: PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
5500: }
5501: PetscFunctionReturn(PETSC_SUCCESS);
5502: }
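/*
  Illustrative sketch (not part of the PETSc source): permuting a matrix symmetrically
  with an ordering computed by MatGetOrdering(); MATORDERINGRCM is used here as an
  example ordering.

    IS  rperm, cperm;
    Mat Ap;
    PetscCall(MatGetOrdering(A, MATORDERINGRCM, &rperm, &cperm));
    PetscCall(MatPermute(A, rperm, cperm, &Ap));
    PetscCall(ISDestroy(&rperm));
    PetscCall(ISDestroy(&cperm));
    PetscCall(MatDestroy(&Ap));
*/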
5504: /*@
5505: MatEqual - Compares two matrices.
5507: Collective
5509: Input Parameters:
5510: + A - the first matrix
5511: - B - the second matrix
5513: Output Parameter:
5514: . flg - `PETSC_TRUE` if the matrices are equal; `PETSC_FALSE` otherwise.
5516: Level: intermediate
5518: .seealso: [](ch_matrices), `Mat`
5519: @*/
5520: PetscErrorCode MatEqual(Mat A, Mat B, PetscBool *flg)
5521: {
5522: PetscFunctionBegin;
5527: PetscAssertPointer(flg, 3);
5528: PetscCheckSameComm(A, 1, B, 2);
5529: MatCheckPreallocated(A, 1);
5530: MatCheckPreallocated(B, 2);
5531: PetscCheck(A->assembled, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5532: PetscCheck(B->assembled, PetscObjectComm((PetscObject)B), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5533: PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N, PetscObjectComm((PetscObject)A), PETSC_ERR_ARG_SIZ, "Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT, A->rmap->N, B->rmap->N, A->cmap->N,
5534: B->cmap->N);
5535: if (A->ops->equal && A->ops->equal == B->ops->equal) {
5536: PetscUseTypeMethod(A, equal, B, flg);
5537: } else {
5538: PetscCall(MatMultEqual(A, B, 10, flg));
5539: }
5540: PetscFunctionReturn(PETSC_SUCCESS);
5541: }
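/*
   Illustrative usage sketch (not part of the PETSc sources): comparing a matrix with an exact copy of
   itself using MatEqual() above; flg is expected to come back PETSC_TRUE. The helper name is hypothetical.
*/
static inline PetscErrorCode MatEqualToDuplicate_Sketch(Mat A, PetscBool *flg)
{
  Mat B;

  PetscFunctionBegin;
  PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &B)); /* B is an independent copy of A with the same values */
  PetscCall(MatEqual(A, B, flg));                  /* uses the type's equal method, or MatMultEqual() as a fallback */
  PetscCall(MatDestroy(&B));
  PetscFunctionReturn(PETSC_SUCCESS);
}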
5543: /*@
5544: MatDiagonalScale - Scales a matrix on the left and right by diagonal
5545: matrices that are stored as vectors. Either of the two scaling
5546: matrices can be `NULL`.
5548: Collective
5550: Input Parameters:
5551: + mat - the matrix to be scaled
5552: . l - the left scaling vector (or `NULL`)
5553: - r - the right scaling vector (or `NULL`)
5555: Level: intermediate
5557: Note:
5558: `MatDiagonalScale()` computes $A = LAR$, where
5559: $L$ and $R$ are diagonal matrices stored as vectors;
5560: $L$ scales the rows of the matrix and $R$ scales the columns of the matrix.
5562: .seealso: [](ch_matrices), `Mat`, `MatScale()`, `MatShift()`, `MatDiagonalSet()`
5563: @*/
5564: PetscErrorCode MatDiagonalScale(Mat mat, Vec l, Vec r)
5565: {
5566: PetscFunctionBegin;
5569: if (l) {
5571: PetscCheckSameComm(mat, 1, l, 2);
5572: }
5573: if (r) {
5575: PetscCheckSameComm(mat, 1, r, 3);
5576: }
5577: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5578: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5579: MatCheckPreallocated(mat, 1);
5580: if (!l && !r) PetscFunctionReturn(PETSC_SUCCESS);
5582: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5583: PetscUseTypeMethod(mat, diagonalscale, l, r);
5584: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5585: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5586: if (l != r) mat->symmetric = PETSC_BOOL3_FALSE;
5587: PetscFunctionReturn(PETSC_SUCCESS);
5588: }
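/*
   Illustrative usage sketch (not part of the PETSc sources): symmetric diagonal (Jacobi-like) scaling
   A -> D A D with D = diag(1/sqrt(|A_ii|)), built with MatDiagonalScale() above. The helper name is
   hypothetical; A is assumed to be an assembled square matrix with nonzero diagonal entries.
*/
static inline PetscErrorCode MatSymmetricDiagonalScale_Sketch(Mat A)
{
  Vec d;

  PetscFunctionBegin;
  PetscCall(MatCreateVecs(A, NULL, &d)); /* vector conforming to the rows of A */
  PetscCall(MatGetDiagonal(A, d));       /* d_i = A_ii */
  PetscCall(VecSqrtAbs(d));              /* d_i = sqrt(|A_ii|) */
  PetscCall(VecReciprocal(d));           /* d_i = 1/sqrt(|A_ii|) */
  PetscCall(MatDiagonalScale(A, d, d));  /* scale rows and columns by the same diagonal */
  PetscCall(VecDestroy(&d));
  PetscFunctionReturn(PETSC_SUCCESS);
}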
5590: /*@
5591: MatScale - Scales all elements of a matrix by a given number.
5593: Logically Collective
5595: Input Parameters:
5596: + mat - the matrix to be scaled
5597: - a - the scaling value
5599: Level: intermediate
5601: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
5602: @*/
5603: PetscErrorCode MatScale(Mat mat, PetscScalar a)
5604: {
5605: PetscFunctionBegin;
5608: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5609: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5611: MatCheckPreallocated(mat, 1);
5613: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
5614: if (a != (PetscScalar)1.0) {
5615: PetscUseTypeMethod(mat, scale, a);
5616: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5617: }
5618: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
5619: PetscFunctionReturn(PETSC_SUCCESS);
5620: }
5622: /*@
5623: MatNorm - Calculates various norms of a matrix.
5625: Collective
5627: Input Parameters:
5628: + mat - the matrix
5629: - type - the type of norm, `NORM_1`, `NORM_FROBENIUS`, `NORM_INFINITY`
5631: Output Parameter:
5632: . nrm - the resulting norm
5634: Level: intermediate
5636: .seealso: [](ch_matrices), `Mat`
5637: @*/
5638: PetscErrorCode MatNorm(Mat mat, NormType type, PetscReal *nrm)
5639: {
5640: PetscFunctionBegin;
5643: PetscAssertPointer(nrm, 3);
5645: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
5646: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
5647: MatCheckPreallocated(mat, 1);
5649: PetscUseTypeMethod(mat, norm, type, nrm);
5650: PetscFunctionReturn(PETSC_SUCCESS);
5651: }
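/*
   Illustrative usage sketch (not part of the PETSc sources): scaling a matrix with MatScale() and then
   measuring it with MatNorm() above. The helper name is hypothetical; A is assumed to be assembled.
*/
static inline PetscErrorCode MatScaleAndNorm_Sketch(Mat A, PetscReal *nrm)
{
  PetscFunctionBegin;
  PetscCall(MatScale(A, 0.5));                /* halve every stored entry of A */
  PetscCall(MatNorm(A, NORM_FROBENIUS, nrm)); /* Frobenius norm of the scaled matrix */
  PetscFunctionReturn(PETSC_SUCCESS);
}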
5653: /*
5654: This variable is used to prevent counting of MatAssemblyBegin() calls that
5655: are made from within a MatAssemblyEnd().
5656: */
5657: static PetscInt MatAssemblyEnd_InUse = 0;
5658: /*@
5659: MatAssemblyBegin - Begins assembling the matrix. This routine should
5660: be called after completing all calls to `MatSetValues()`.
5662: Collective
5664: Input Parameters:
5665: + mat - the matrix
5666: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5668: Level: beginner
5670: Notes:
5671: `MatSetValues()` generally caches the values that belong to other MPI processes. The matrix is ready to
5672: use only after `MatAssemblyBegin()` and `MatAssemblyEnd()` have been called.
5674: Use `MAT_FLUSH_ASSEMBLY` when switching between `ADD_VALUES` and `INSERT_VALUES`
5675: in `MatSetValues()`; use `MAT_FINAL_ASSEMBLY` for the final assembly before
5676: using the matrix.
5678: ALL processes that share a matrix MUST call `MatAssemblyBegin()` and `MatAssemblyEnd()` the SAME NUMBER of times, and each time with the
5679: same flag of `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY` for all processes. Thus you CANNOT locally change from `ADD_VALUES` to `INSERT_VALUES`; that is
5680: a global collective operation requiring all processes that share the matrix.
5682: Space for preallocated nonzeros that is not filled by a call to `MatSetValues()` or a related routine is compressed
5683: out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
5684: before `MAT_FINAL_ASSEMBLY` so the space is not compressed out.
5686: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
5687: @*/
5688: PetscErrorCode MatAssemblyBegin(Mat mat, MatAssemblyType type)
5689: {
5690: PetscFunctionBegin;
5693: MatCheckPreallocated(mat, 1);
5694: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix.\nDid you forget to call MatSetUnfactored()?");
5695: if (mat->assembled) {
5696: mat->was_assembled = PETSC_TRUE;
5697: mat->assembled = PETSC_FALSE;
5698: }
5700: if (!MatAssemblyEnd_InUse) {
5701: PetscCall(PetscLogEventBegin(MAT_AssemblyBegin, mat, 0, 0, 0));
5702: PetscTryTypeMethod(mat, assemblybegin, type);
5703: PetscCall(PetscLogEventEnd(MAT_AssemblyBegin, mat, 0, 0, 0));
5704: } else PetscTryTypeMethod(mat, assemblybegin, type);
5705: PetscFunctionReturn(PETSC_SUCCESS);
5706: }
5708: /*@
5709: MatAssembled - Indicates if a matrix has been assembled and is ready for
5710: use; for example, in matrix-vector product.
5712: Not Collective
5714: Input Parameter:
5715: . mat - the matrix
5717: Output Parameter:
5718: . assembled - `PETSC_TRUE` or `PETSC_FALSE`
5720: Level: advanced
5722: .seealso: [](ch_matrices), `Mat`, `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
5723: @*/
5724: PetscErrorCode MatAssembled(Mat mat, PetscBool *assembled)
5725: {
5726: PetscFunctionBegin;
5728: PetscAssertPointer(assembled, 2);
5729: *assembled = mat->assembled;
5730: PetscFunctionReturn(PETSC_SUCCESS);
5731: }
5733: /*@
5734: MatAssemblyEnd - Completes assembling the matrix. This routine should
5735: be called after `MatAssemblyBegin()`.
5737: Collective
5739: Input Parameters:
5740: + mat - the matrix
5741: - type - type of assembly, either `MAT_FLUSH_ASSEMBLY` or `MAT_FINAL_ASSEMBLY`
5743: Options Database Keys:
5744: + -mat_view ::ascii_info - Prints info on matrix at conclusion of `MatAssemblyEnd()`
5745: . -mat_view ::ascii_info_detail - Prints more detailed info
5746: . -mat_view - Prints matrix in ASCII format
5747: . -mat_view ::ascii_matlab - Prints matrix in MATLAB format
5748: . -mat_view draw - draws nonzero structure of matrix, using `MatView()` and `PetscDrawOpenX()`.
5749: . -display <name> - Sets display name (default is host)
5750: . -draw_pause <sec> - Sets number of seconds to pause after display
5751: . -mat_view socket - Sends matrix to socket, can be accessed from MATLAB (See [Using MATLAB with PETSc](ch_matlab))
5752: . -viewer_socket_machine <machine> - Machine to use for socket
5753: . -viewer_socket_port <port> - Port number to use for socket
5754: - -mat_view binary:filename[:append] - Save matrix to file in binary format
5756: Level: beginner
5758: .seealso: [](ch_matrices), `Mat`, `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
5759: @*/
5760: PetscErrorCode MatAssemblyEnd(Mat mat, MatAssemblyType type)
5761: {
5762: static PetscInt inassm = 0;
5763: PetscBool flg = PETSC_FALSE;
5765: PetscFunctionBegin;
5769: inassm++;
5770: MatAssemblyEnd_InUse++;
5771: if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
5772: PetscCall(PetscLogEventBegin(MAT_AssemblyEnd, mat, 0, 0, 0));
5773: PetscTryTypeMethod(mat, assemblyend, type);
5774: PetscCall(PetscLogEventEnd(MAT_AssemblyEnd, mat, 0, 0, 0));
5775: } else PetscTryTypeMethod(mat, assemblyend, type);
5777: /* Flush assembly is not a true assembly */
5778: if (type != MAT_FLUSH_ASSEMBLY) {
5779: if (mat->num_ass) {
5780: if (!mat->symmetry_eternal) {
5781: mat->symmetric = PETSC_BOOL3_UNKNOWN;
5782: mat->hermitian = PETSC_BOOL3_UNKNOWN;
5783: }
5784: if (!mat->structural_symmetry_eternal && mat->ass_nonzerostate != mat->nonzerostate) mat->structurally_symmetric = PETSC_BOOL3_UNKNOWN;
5785: if (!mat->spd_eternal) mat->spd = PETSC_BOOL3_UNKNOWN;
5786: }
5787: mat->num_ass++;
5788: mat->assembled = PETSC_TRUE;
5789: mat->ass_nonzerostate = mat->nonzerostate;
5790: }
5792: mat->insertmode = NOT_SET_VALUES;
5793: MatAssemblyEnd_InUse--;
5794: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
5795: if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
5796: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
5798: if (mat->checksymmetryonassembly) {
5799: PetscCall(MatIsSymmetric(mat, mat->checksymmetrytol, &flg));
5800: if (flg) {
5801: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5802: } else {
5803: PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat), "Matrix is not symmetric (tolerance %g)\n", (double)mat->checksymmetrytol));
5804: }
5805: }
5806: if (mat->nullsp && mat->checknullspaceonassembly) PetscCall(MatNullSpaceTest(mat->nullsp, mat, NULL));
5807: }
5808: inassm--;
5809: PetscFunctionReturn(PETSC_SUCCESS);
5810: }
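/*
   Illustrative usage sketch (not part of the PETSc sources): switching from INSERT_VALUES to ADD_VALUES
   requires a flush assembly in between, followed by a final assembly before the matrix is used, as
   described for MatAssemblyBegin()/MatAssemblyEnd() above. The helper name is hypothetical and must be
   called collectively by all processes that share A; row/col are assumed to be valid global indices.
*/
static inline PetscErrorCode MatInsertThenAdd_Sketch(Mat A, PetscInt row, PetscInt col)
{
  PetscFunctionBegin;
  PetscCall(MatSetValue(A, row, col, 1.0, INSERT_VALUES)); /* first pass: insert */
  PetscCall(MatAssemblyBegin(A, MAT_FLUSH_ASSEMBLY));      /* flush before changing the insert mode */
  PetscCall(MatAssemblyEnd(A, MAT_FLUSH_ASSEMBLY));
  PetscCall(MatSetValue(A, row, col, 2.0, ADD_VALUES));    /* second pass: add into the same location */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));      /* final assembly; A is now ready for use */
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}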
5812: // PetscClangLinter pragma disable: -fdoc-section-header-unknown
5813: /*@
5814: MatSetOption - Sets a parameter option for a matrix. Some options
5815: may be specific to certain storage formats. Some options
5816: determine how values will be inserted (or added). Sorted,
5817: row-oriented input will generally assemble the fastest. The default
5818: is row-oriented.
5820: Logically Collective for certain operations, such as `MAT_SPD`; not collective for others, such as `MAT_ROW_ORIENTED`; see `MatOption`
5822: Input Parameters:
5823: + mat - the matrix
5824: . op - the option, one of those listed below (and possibly others),
5825: - flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
5827: Options Describing Matrix Structure:
5828: + `MAT_SPD` - symmetric positive definite
5829: . `MAT_SYMMETRIC` - symmetric in terms of both structure and value
5830: . `MAT_HERMITIAN` - the matrix is equal to the complex conjugate of its transpose
5831: . `MAT_STRUCTURALLY_SYMMETRIC` - symmetric nonzero structure
5832: . `MAT_SYMMETRY_ETERNAL` - indicates the symmetry (or Hermitian structure) or its absence will persist through any changes to the matrix
5833: . `MAT_STRUCTURAL_SYMMETRY_ETERNAL` - indicates the structural symmetry or its absence will persist through any changes to the matrix
5834: . `MAT_SPD_ETERNAL` - indicates the value of `MAT_SPD` (true or false) will persist through any changes to the matrix
5836: These are not really options of the matrix; they are knowledge about the structure of the matrix that users may provide so that they
5837: do not need to be computed (usually at a high cost)
5839: Options For Use with `MatSetValues()`:
5840: Insert a logically dense subblock, which can be
5841: . `MAT_ROW_ORIENTED` - row-oriented (default)
5843: These options reflect the data you pass in with `MatSetValues()`; they have
5844: nothing to do with how the data is stored internally in the matrix
5845: data structure.
5847: When (re)assembling a matrix, we can restrict the input for
5848: efficiency/debugging purposes. These options include
5849: . `MAT_NEW_NONZERO_LOCATIONS` - additional insertions will be allowed if they generate a new nonzero (slow)
5850: . `MAT_FORCE_DIAGONAL_ENTRIES` - forces diagonal entries to be allocated
5851: . `MAT_IGNORE_OFF_PROC_ENTRIES` - drops off-processor entries
5852: . `MAT_NEW_NONZERO_LOCATION_ERR` - generates an error for new matrix entry
5853: . `MAT_USE_HASH_TABLE` - uses a hash table to speed up matrix assembly
5854: . `MAT_NO_OFF_PROC_ENTRIES` - you know each process will only set values for its own rows, will generate an error if
5855: any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
5856: performance for very large process counts.
5857: - `MAT_SUBSET_OFF_PROC_ENTRIES` - you know that the first assembly after setting this flag will set a superset
5858: of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
5859: functions, instead sending only neighbor messages.
5861: Level: intermediate
5863: Notes:
5864: Except for `MAT_UNUSED_NONZERO_LOCATION_ERR` and `MAT_ROW_ORIENTED`, all processes that share the matrix must pass the same value in `flg`!
5866: Some options are relevant only for particular matrix types and
5867: are thus ignored by others. Other options are not supported by
5868: certain matrix types and will generate an error message if set.
5870: If using Fortran to compute a matrix, one may need to
5871: use the column-oriented option (or convert to the row-oriented
5872: format).
5874: `MAT_NEW_NONZERO_LOCATIONS` set to `PETSC_FALSE` indicates that any add or insertion
5875: that would generate a new entry in the nonzero structure is instead
5876: ignored. Thus, if memory has not already been allocated for this particular
5877: data, then the insertion is ignored. For dense matrices, in which
5878: the entire array is allocated, no entries are ever ignored.
5879: Set after the first `MatAssemblyEnd()`. If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processing has one less global reduction
5881: `MAT_NEW_NONZERO_LOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5882: that would generate a new entry in the nonzero structure instead produces
5883: an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats only.) If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processing has one less global reduction
5885: `MAT_NEW_NONZERO_ALLOCATION_ERR` set to `PETSC_TRUE` indicates that any add or insertion
5886: that would generate a new entry that has not been preallocated will
5887: instead produce an error. (Currently supported for `MATAIJ` and `MATBAIJ` formats
5888: only.) This is a useful flag when debugging matrix memory preallocation.
5889: If this option is set, then the `MatAssemblyBegin()`/`MatAssemblyEnd()` processing has one less global reduction
5891: `MAT_IGNORE_OFF_PROC_ENTRIES` set to `PETSC_TRUE` indicates entries destined for
5892: other processors should be dropped, rather than stashed.
5893: This is useful if you know that the "owning" processor is also
5894: always generating the correct matrix entries, so that PETSc need
5895: not transfer duplicate entries generated on another processor.
5897: `MAT_USE_HASH_TABLE` indicates that a hash table be used to improve the
5898: searches during matrix assembly. When this flag is set, the hash table
5899: is created during the first matrix assembly. This hash table is
5900: used the next time through, during `MatSetValues()`/`MatSetValuesBlocked()`
5901: to improve the searching of indices. `MAT_NEW_NONZERO_LOCATIONS` flag
5902: should be used with `MAT_USE_HASH_TABLE` flag. This option is currently
5903: supported by `MATMPIBAIJ` format only.
5905: `MAT_KEEP_NONZERO_PATTERN` indicates that when `MatZeroRows()` is called the zeroed entries
5906: are kept in the nonzero structure
5908: `MAT_IGNORE_ZERO_ENTRIES` - for `MATAIJ` and `MATIS` matrices this will stop zero values from creating
5909: a zero location in the matrix
5911: `MAT_USE_INODES` - indicates using inode version of the code - works with `MATAIJ` matrix types
5913: `MAT_NO_OFF_PROC_ZERO_ROWS` - you know each process will only zero its own rows. This avoids all reductions in the
5914: zero row routines and thus improves performance for very large process counts.
5916: `MAT_IGNORE_LOWER_TRIANGULAR` - For `MATSBAIJ` matrices will ignore any insertions you make in the lower triangular
5917: part of the matrix (since they should match the upper triangular part).
5919: `MAT_SORTED_FULL` - each process provides exactly its local rows; all column indices for a given row are passed in a
5920: single call to `MatSetValues()`, preallocation is perfect, row oriented, `INSERT_VALUES` is used. Common
5921: with finite difference schemes with non-periodic boundary conditions.
5923: Developer Note:
5924: `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, and `MAT_SPD_ETERNAL` are used by `MatAssemblyEnd()` and in other
5925: places where otherwise the value of `MAT_SYMMETRIC`, `MAT_STRUCTURALLY_SYMMETRIC` or `MAT_SPD` would need to be changed back
5926: to `PETSC_BOOL3_UNKNOWN` because the matrix values had changed so the code cannot be certain that the related property had
5927: not changed.
5929: .seealso: [](ch_matrices), `MatOption`, `Mat`, `MatGetOption()`
5930: @*/
5931: PetscErrorCode MatSetOption(Mat mat, MatOption op, PetscBool flg)
5932: {
5933: PetscFunctionBegin;
5935: if (op > 0) {
5938: }
5940: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
5942: switch (op) {
5943: case MAT_FORCE_DIAGONAL_ENTRIES:
5944: mat->force_diagonals = flg;
5945: PetscFunctionReturn(PETSC_SUCCESS);
5946: case MAT_NO_OFF_PROC_ENTRIES:
5947: mat->nooffprocentries = flg;
5948: PetscFunctionReturn(PETSC_SUCCESS);
5949: case MAT_SUBSET_OFF_PROC_ENTRIES:
5950: mat->assembly_subset = flg;
5951: if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
5952: #if !defined(PETSC_HAVE_MPIUNI)
5953: PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
5954: #endif
5955: mat->stash.first_assembly_done = PETSC_FALSE;
5956: }
5957: PetscFunctionReturn(PETSC_SUCCESS);
5958: case MAT_NO_OFF_PROC_ZERO_ROWS:
5959: mat->nooffproczerorows = flg;
5960: PetscFunctionReturn(PETSC_SUCCESS);
5961: case MAT_SPD:
5962: if (flg) {
5963: mat->spd = PETSC_BOOL3_TRUE;
5964: mat->symmetric = PETSC_BOOL3_TRUE;
5965: mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5966: } else {
5967: mat->spd = PETSC_BOOL3_FALSE;
5968: }
5969: break;
5970: case MAT_SYMMETRIC:
5971: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5972: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5973: #if !defined(PETSC_USE_COMPLEX)
5974: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5975: #endif
5976: break;
5977: case MAT_HERMITIAN:
5978: mat->hermitian = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5979: if (flg) mat->structurally_symmetric = PETSC_BOOL3_TRUE;
5980: #if !defined(PETSC_USE_COMPLEX)
5981: mat->symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5982: #endif
5983: break;
5984: case MAT_STRUCTURALLY_SYMMETRIC:
5985: mat->structurally_symmetric = flg ? PETSC_BOOL3_TRUE : PETSC_BOOL3_FALSE;
5986: break;
5987: case MAT_SYMMETRY_ETERNAL:
5988: PetscCheck(mat->symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SYMMETRY_ETERNAL without first setting MAT_SYMMETRIC to true or false");
5989: mat->symmetry_eternal = flg;
5990: if (flg) mat->structural_symmetry_eternal = PETSC_TRUE;
5991: break;
5992: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
5993: PetscCheck(mat->structurally_symmetric != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_STRUCTURAL_SYMMETRY_ETERNAL without first setting MAT_STRUCTURALLY_SYMMETRIC to true or false");
5994: mat->structural_symmetry_eternal = flg;
5995: break;
5996: case MAT_SPD_ETERNAL:
5997: PetscCheck(mat->spd != PETSC_BOOL3_UNKNOWN, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot set MAT_SPD_ETERNAL without first setting MAT_SPD to true or false");
5998: mat->spd_eternal = flg;
5999: if (flg) {
6000: mat->structural_symmetry_eternal = PETSC_TRUE;
6001: mat->symmetry_eternal = PETSC_TRUE;
6002: }
6003: break;
6004: case MAT_STRUCTURE_ONLY:
6005: mat->structure_only = flg;
6006: break;
6007: case MAT_SORTED_FULL:
6008: mat->sortedfull = flg;
6009: break;
6010: default:
6011: break;
6012: }
6013: PetscTryTypeMethod(mat, setoption, op, flg);
6014: PetscFunctionReturn(PETSC_SUCCESS);
6015: }
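/*
   Illustrative usage sketch (not part of the PETSc sources): declaring properties of a matrix with
   MatSetOption() above so PETSc does not have to compute them, plus a debugging option that errors on
   insertions outside the preallocation. The helper name is hypothetical.
*/
static inline PetscErrorCode MatDeclareSPD_Sketch(Mat A)
{
  PetscFunctionBegin;
  PetscCall(MatSetOption(A, MAT_SPD, PETSC_TRUE));                        /* user asserts A is symmetric positive definite */
  PetscCall(MatSetOption(A, MAT_SPD_ETERNAL, PETSC_TRUE));                /* ...and that this survives future changes to the values */
  PetscCall(MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE)); /* error if an insertion exceeds the preallocation */
  PetscFunctionReturn(PETSC_SUCCESS);
}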
6017: /*@
6018: MatGetOption - Gets a parameter option that has been set for a matrix.
6020: Logically Collective
6022: Input Parameters:
6023: + mat - the matrix
6024: - op - the option; this only responds to certain options, check the code for which ones
6026: Output Parameter:
6027: . flg - turn the option on (`PETSC_TRUE`) or off (`PETSC_FALSE`)
6029: Level: intermediate
6031: Notes:
6032: Can only be called after `MatSetSizes()` and `MatSetType()` have been called.
6034: Certain option values may be unknown, for those use the routines `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, or
6035: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6037: .seealso: [](ch_matrices), `Mat`, `MatOption`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`,
6038: `MatIsSymmetricKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
6039: @*/
6040: PetscErrorCode MatGetOption(Mat mat, MatOption op, PetscBool *flg)
6041: {
6042: PetscFunctionBegin;
6046: PetscCheck(((int)op) > MAT_OPTION_MIN && ((int)op) < MAT_OPTION_MAX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Options %d is out of range", (int)op);
6047: PetscCheck(((PetscObject)mat)->type_name, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_TYPENOTSET, "Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");
6049: switch (op) {
6050: case MAT_NO_OFF_PROC_ENTRIES:
6051: *flg = mat->nooffprocentries;
6052: break;
6053: case MAT_NO_OFF_PROC_ZERO_ROWS:
6054: *flg = mat->nooffproczerorows;
6055: break;
6056: case MAT_SYMMETRIC:
6057: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSymmetric() or MatIsSymmetricKnown()");
6058: break;
6059: case MAT_HERMITIAN:
6060: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsHermitian() or MatIsHermitianKnown()");
6061: break;
6062: case MAT_STRUCTURALLY_SYMMETRIC:
6063: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsStructurallySymmetric() or MatIsStructurallySymmetricKnown()");
6064: break;
6065: case MAT_SPD:
6066: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Use MatIsSPDKnown()");
6067: break;
6068: case MAT_SYMMETRY_ETERNAL:
6069: *flg = mat->symmetry_eternal;
6070: break;
6071: case MAT_STRUCTURAL_SYMMETRY_ETERNAL:
6072: *flg = mat->structural_symmetry_eternal;
6073: break;
6074: default:
6075: break;
6076: }
6077: PetscFunctionReturn(PETSC_SUCCESS);
6078: }
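/*
   Illustrative usage sketch (not part of the PETSc sources): options with a stored flag, such as
   MAT_SYMMETRY_ETERNAL, can be read back with MatGetOption() above, while MAT_SYMMETRIC itself must be
   queried with MatIsSymmetricKnown()/MatIsSymmetric(). The helper name is hypothetical.
*/
static inline PetscErrorCode MatQuerySymmetry_Sketch(Mat A)
{
  PetscBool eternal, set, symm;

  PetscFunctionBegin;
  PetscCall(MatGetOption(A, MAT_SYMMETRY_ETERNAL, &eternal));
  PetscCall(MatIsSymmetricKnown(A, &set, &symm)); /* set indicates whether the symmetry flag is known at all */
  PetscCall(PetscPrintf(PetscObjectComm((PetscObject)A), "symmetry eternal %d, known symmetric %d\n", (int)eternal, (int)(set && symm)));
  PetscFunctionReturn(PETSC_SUCCESS);
}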
6080: /*@
6081: MatZeroEntries - Zeros all entries of a matrix. For sparse matrices
6082: this routine retains the old nonzero structure.
6084: Logically Collective
6086: Input Parameter:
6087: . mat - the matrix
6089: Level: intermediate
6091: Note:
6092: If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
6093: See the Performance chapter of the users manual for information on preallocating matrices.
6095: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`
6096: @*/
6097: PetscErrorCode MatZeroEntries(Mat mat)
6098: {
6099: PetscFunctionBegin;
6102: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6103: PetscCheck(mat->insertmode == NOT_SET_VALUES, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for matrices where you have set values but not yet assembled");
6104: MatCheckPreallocated(mat, 1);
6106: PetscCall(PetscLogEventBegin(MAT_ZeroEntries, mat, 0, 0, 0));
6107: PetscUseTypeMethod(mat, zeroentries);
6108: PetscCall(PetscLogEventEnd(MAT_ZeroEntries, mat, 0, 0, 0));
6109: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6110: PetscFunctionReturn(PETSC_SUCCESS);
6111: }
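/*
   Illustrative usage sketch (not part of the PETSc sources): reusing the nonzero pattern of a matrix
   across repeated assemblies, e.g. in a time-stepping loop, by zeroing the values with MatZeroEntries()
   above and refilling them. The helper name is hypothetical; row/col are assumed to lie inside the
   existing nonzero pattern.
*/
static inline PetscErrorCode MatRefillSameNonzeroPattern_Sketch(Mat A, PetscInt row, PetscInt col, PetscScalar v)
{
  PetscFunctionBegin;
  PetscCall(MatZeroEntries(A));                       /* zero the values, keep the pattern and preallocation */
  PetscCall(MatSetValue(A, row, col, v, ADD_VALUES)); /* refill; no new allocation is needed */
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}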
6113: /*@
6114: MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
6115: of a set of rows and columns of a matrix.
6117: Collective
6119: Input Parameters:
6120: + mat - the matrix
6121: . numRows - the number of rows/columns to zero
6122: . rows - the global row indices
6123: . diag - value put in the diagonal of the eliminated rows
6124: . x - optional vector of the solution for zeroed rows (other entries in vector are not used), these must be set before this call
6125: - b - optional vector of the right hand side, that will be adjusted by provided solution entries
6127: Level: intermediate
6129: Notes:
6130: This routine, along with `MatZeroRows()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6132: For each zeroed row, the value of the corresponding `b` is set to diag times the value of the corresponding `x`.
6133: The other entries of `b` will be adjusted by the known values of `x` times the corresponding matrix entries in the columns that are being eliminated
6135: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6136: Krylov method to take advantage of the known solution on the zeroed rows.
6138: For the parallel case, all processes that share the matrix (i.e.,
6139: those in the communicator used for matrix creation) MUST call this
6140: routine, regardless of whether any rows being zeroed are owned by
6141: them.
6143: Unlike `MatZeroRows()` this does not change the nonzero structure of the matrix, it merely zeros those entries in the matrix.
6145: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6146: list only rows local to itself).
6148: The option `MAT_NO_OFF_PROC_ZERO_ROWS` does not apply to this routine.
6150: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6151: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6152: @*/
6153: PetscErrorCode MatZeroRowsColumns(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6154: {
6155: PetscFunctionBegin;
6158: if (numRows) PetscAssertPointer(rows, 3);
6159: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6160: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6161: MatCheckPreallocated(mat, 1);
6163: PetscUseTypeMethod(mat, zerorowscolumns, numRows, rows, diag, x, b);
6164: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6165: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6166: PetscFunctionReturn(PETSC_SUCCESS);
6167: }
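/*
   Illustrative usage sketch (not part of the PETSc sources): symmetry-preserving elimination of
   Dirichlet boundary conditions with MatZeroRowsColumns() above. The helper name is hypothetical;
   x must already contain the prescribed boundary values in the rows listed in bndry[].
*/
static inline PetscErrorCode MatEliminateDirichletSymmetric_Sketch(Mat A, PetscInt nb, const PetscInt bndry[], Vec x, Vec b)
{
  PetscFunctionBegin;
  PetscCall(MatZeroRowsColumns(A, nb, bndry, 1.0, x, b)); /* zero rows and columns, 1.0 on the diagonal, adjust b */
  PetscFunctionReturn(PETSC_SUCCESS);
}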
6169: /*@
6170: MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
6171: of a set of rows and columns of a matrix.
6173: Collective
6175: Input Parameters:
6176: + mat - the matrix
6177: . is - the rows to zero
6178: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6179: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6180: - b - optional vector of right hand side, that will be adjusted by provided solution
6182: Level: intermediate
6184: Note:
6185: See `MatZeroRowsColumns()` for details on how this routine operates.
6187: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6188: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
6189: @*/
6190: PetscErrorCode MatZeroRowsColumnsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6191: {
6192: PetscInt numRows;
6193: const PetscInt *rows;
6195: PetscFunctionBegin;
6200: PetscCall(ISGetLocalSize(is, &numRows));
6201: PetscCall(ISGetIndices(is, &rows));
6202: PetscCall(MatZeroRowsColumns(mat, numRows, rows, diag, x, b));
6203: PetscCall(ISRestoreIndices(is, &rows));
6204: PetscFunctionReturn(PETSC_SUCCESS);
6205: }
6207: /*@
6208: MatZeroRows - Zeros all entries (except possibly the main diagonal)
6209: of a set of rows of a matrix.
6211: Collective
6213: Input Parameters:
6214: + mat - the matrix
6215: . numRows - the number of rows to zero
6216: . rows - the global row indices
6217: . diag - value put in the diagonal of the zeroed rows
6218: . x - optional vector of solutions for zeroed rows (other entries in vector are not used), these must be set before this call
6219: - b - optional vector of right hand side, that will be adjusted by provided solution entries
6221: Level: intermediate
6223: Notes:
6224: This routine, along with `MatZeroRowsColumns()`, is typically used to eliminate known Dirichlet boundary conditions from a linear system.
6226: For each zeroed row, the value of the corresponding `b` is set to `diag` times the value of the corresponding `x`.
6228: If the resulting linear system is to be solved with `KSP` then one can (but does not have to) call `KSPSetInitialGuessNonzero()` to allow the
6229: Krylov method to take advantage of the known solution on the zeroed rows.
6231: May be followed by using a `PC` of type `PCREDISTRIBUTE` to solve the reduced problem (`PCREDISTRIBUTE` completely eliminates the zeroed rows and their corresponding columns
6232: from the matrix).
6234: Unlike `MatZeroRowsColumns()`, for the `MATAIJ` and `MATBAIJ` matrix formats this removes the old nonzero structure from the eliminated rows of the matrix
6235: but does not release memory. Because of this removal, matrix-vector products with the adjusted matrix will be a bit faster. For the dense and block diagonal
6236: formats this does not alter the nonzero structure.
6238: If the option `MatSetOption`(mat,`MAT_KEEP_NONZERO_PATTERN`,`PETSC_TRUE`) has been set, the nonzero structure
6239: of the matrix is not changed; the values are
6240: merely zeroed.
6242: The user can set a value in the diagonal entry (or for the `MATAIJ`
6243: formats can optionally remove the main diagonal entry from the
6244: nonzero structure as well, by passing 0.0 as the final argument).
6246: For the parallel case, all processes that share the matrix (i.e.,
6247: those in the communicator used for matrix creation) MUST call this
6248: routine, regardless of whether any rows being zeroed are owned by
6249: them.
6251: Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6252: list only rows local to itself).
6254: You can call `MatSetOption`(mat,`MAT_NO_OFF_PROC_ZERO_ROWS`,`PETSC_TRUE`) if each process indicates only rows it
6255: owns that are to be zeroed. This saves a global synchronization in the implementation.
6257: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6258: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`, `PCREDISTRIBUTE`
6259: @*/
6260: PetscErrorCode MatZeroRows(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6261: {
6262: PetscFunctionBegin;
6265: if (numRows) PetscAssertPointer(rows, 3);
6266: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6267: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6268: MatCheckPreallocated(mat, 1);
6270: PetscUseTypeMethod(mat, zerorows, numRows, rows, diag, x, b);
6271: PetscCall(MatViewFromOptions(mat, NULL, "-mat_view"));
6272: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6273: PetscFunctionReturn(PETSC_SUCCESS);
6274: }
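/*
   Illustrative usage sketch (not part of the PETSc sources): eliminating Dirichlet rows with
   MatZeroRows() above while keeping the nonzero pattern so that later assemblies can reuse it.
   The helper name is hypothetical; x must already contain the prescribed boundary values in the
   rows listed in bndry[], and b is the right-hand side to be adjusted.
*/
static inline PetscErrorCode MatEliminateDirichletRows_Sketch(Mat A, PetscInt nb, const PetscInt bndry[], Vec x, Vec b)
{
  PetscFunctionBegin;
  PetscCall(MatSetOption(A, MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE)); /* keep the zeroed entries in the structure */
  PetscCall(MatZeroRows(A, nb, bndry, 1.0, x, b));                  /* zero the rows, 1.0 on the diagonal, b[bndry] = x[bndry] */
  PetscFunctionReturn(PETSC_SUCCESS);
}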
6276: /*@
6277: MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6278: of a set of rows of a matrix.
6280: Collective
6282: Input Parameters:
6283: + mat - the matrix
6284: . is - index set of rows to remove (if `NULL` then no row is removed)
6285: . diag - value put in all diagonals of eliminated rows
6286: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6287: - b - optional vector of right hand side, that will be adjusted by provided solution
6289: Level: intermediate
6291: Note:
6292: See `MatZeroRows()` for details on how this routine operates.
6294: .seealso: [](ch_matrices), `Mat`, `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6295: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6296: @*/
6297: PetscErrorCode MatZeroRowsIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6298: {
6299: PetscInt numRows = 0;
6300: const PetscInt *rows = NULL;
6302: PetscFunctionBegin;
6305: if (is) {
6307: PetscCall(ISGetLocalSize(is, &numRows));
6308: PetscCall(ISGetIndices(is, &rows));
6309: }
6310: PetscCall(MatZeroRows(mat, numRows, rows, diag, x, b));
6311: if (is) PetscCall(ISRestoreIndices(is, &rows));
6312: PetscFunctionReturn(PETSC_SUCCESS);
6313: }
6315: /*@
6316: MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6317: of a set of rows of a matrix. These rows must be local to the process.
6319: Collective
6321: Input Parameters:
6322: + mat - the matrix
6323: . numRows - the number of rows to remove
6324: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6325: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6326: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6327: - b - optional vector of right hand side, that will be adjusted by provided solution
6329: Level: intermediate
6331: Notes:
6332: See `MatZeroRows()` for details on how this routine operates.
6334: The grid coordinates are across the entire grid, not just the local portion
6336: For periodic boundary conditions use negative indices for values to the left (below 0), which are
6337: obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6338: etc., to obtain values that are obtained by wrapping from the left edge. This does not work for anything but the
6339: `DM_BOUNDARY_PERIODIC` boundary type.
6341: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6342: a single value per point) you can skip filling those indices.
6344: Fortran Note:
6345: `idxm` and `idxn` should be declared as
6346: $ MatStencil idxm(4, m)
6347: and the values inserted using
6348: .vb
6349: idxm(MatStencil_i, 1) = i
6350: idxm(MatStencil_j, 1) = j
6351: idxm(MatStencil_k, 1) = k
6352: idxm(MatStencil_c, 1) = c
6353: etc
6354: .ve
6356: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRows()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6357: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6358: @*/
6359: PetscErrorCode MatZeroRowsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6360: {
6361: PetscInt dim = mat->stencil.dim;
6362: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6363: PetscInt *dims = mat->stencil.dims + 1;
6364: PetscInt *starts = mat->stencil.starts;
6365: PetscInt *dxm = (PetscInt *)rows;
6366: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6368: PetscFunctionBegin;
6371: if (numRows) PetscAssertPointer(rows, 3);
6373: PetscCall(PetscMalloc1(numRows, &jdxm));
6374: for (i = 0; i < numRows; ++i) {
6375: /* Skip unused dimensions (they are ordered k, j, i, c) */
6376: for (j = 0; j < 3 - sdim; ++j) dxm++;
6377: /* Local index in X dir */
6378: tmp = *dxm++ - starts[0];
6379: /* Loop over remaining dimensions */
6380: for (j = 0; j < dim - 1; ++j) {
6381: /* If nonlocal, set index to be negative */
6382: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6383: /* Update local index */
6384: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6385: }
6386: /* Skip component slot if necessary */
6387: if (mat->stencil.noc) dxm++;
6388: /* Local row number */
6389: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6390: }
6391: PetscCall(MatZeroRowsLocal(mat, numNewRows, jdxm, diag, x, b));
6392: PetscCall(PetscFree(jdxm));
6393: PetscFunctionReturn(PETSC_SUCCESS);
6394: }
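/*
   Illustrative usage sketch (not part of the PETSc sources): zeroing one grid point's row with
   MatZeroRowsStencil() above, using global (i,j) grid indices. The helper name is hypothetical;
   A is assumed to have been obtained from DMCreateMatrix() on a 2d DMDA so that the stencil
   information is available.
*/
static inline PetscErrorCode MatZeroGridPointRow_Sketch(Mat A, PetscInt i, PetscInt j)
{
  MatStencil row;

  PetscFunctionBegin;
  row.i = i; /* global grid index in x */
  row.j = j; /* global grid index in y */
  row.k = 0; /* unused in 2d */
  row.c = 0; /* single degree of freedom per point */
  PetscCall(MatZeroRowsStencil(A, 1, &row, 1.0, NULL, NULL)); /* zero the row, keep 1.0 on the diagonal */
  PetscFunctionReturn(PETSC_SUCCESS);
}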
6396: /*@
6397: MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6398: of a set of rows and columns of a matrix.
6400: Collective
6402: Input Parameters:
6403: + mat - the matrix
6404: . numRows - the number of rows/columns to remove
6405: . rows - the grid coordinates (and component number when dof > 1) for matrix rows
6406: . diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6407: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6408: - b - optional vector of right hand side, that will be adjusted by provided solution
6410: Level: intermediate
6412: Notes:
6413: See `MatZeroRowsColumns()` for details on how this routine operates.
6415: The grid coordinates are across the entire grid, not just the local portion
6417: For periodic boundary conditions use negative indices for values to the left (below 0), which are
6418: obtained by wrapping values from the right edge. For values to the right of the last entry, use that index plus one,
6419: etc., to obtain values that are obtained by wrapping from the left edge. This does not work for anything but the
6420: `DM_BOUNDARY_PERIODIC` boundary type.
6422: For indices that don't mean anything for your case (like the k index when working in 2d, or the c index when you have
6423: a single value per point) you can skip filling those indices.
6425: Fortran Note:
6426: `idxm` and `idxn` should be declared as
6427: $ MatStencil idxm(4, m)
6428: and the values inserted using
6429: .vb
6430: idxm(MatStencil_i, 1) = i
6431: idxm(MatStencil_j, 1) = j
6432: idxm(MatStencil_k, 1) = k
6433: idxm(MatStencil_c, 1) = c
6434: etc
6435: .ve
6437: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6438: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6439: @*/
6440: PetscErrorCode MatZeroRowsColumnsStencil(Mat mat, PetscInt numRows, const MatStencil rows[], PetscScalar diag, Vec x, Vec b)
6441: {
6442: PetscInt dim = mat->stencil.dim;
6443: PetscInt sdim = dim - (1 - (PetscInt)mat->stencil.noc);
6444: PetscInt *dims = mat->stencil.dims + 1;
6445: PetscInt *starts = mat->stencil.starts;
6446: PetscInt *dxm = (PetscInt *)rows;
6447: PetscInt *jdxm, i, j, tmp, numNewRows = 0;
6449: PetscFunctionBegin;
6452: if (numRows) PetscAssertPointer(rows, 3);
6454: PetscCall(PetscMalloc1(numRows, &jdxm));
6455: for (i = 0; i < numRows; ++i) {
6456: /* Skip unused dimensions (they are ordered k, j, i, c) */
6457: for (j = 0; j < 3 - sdim; ++j) dxm++;
6458: /* Local index in X dir */
6459: tmp = *dxm++ - starts[0];
6460: /* Loop over remaining dimensions */
6461: for (j = 0; j < dim - 1; ++j) {
6462: /* If nonlocal, set index to be negative */
6463: if ((*dxm++ - starts[j + 1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6464: /* Update local index */
6465: else tmp = tmp * dims[j] + *(dxm - 1) - starts[j + 1];
6466: }
6467: /* Skip component slot if necessary */
6468: if (mat->stencil.noc) dxm++;
6469: /* Local row number */
6470: if (tmp >= 0) jdxm[numNewRows++] = tmp;
6471: }
6472: PetscCall(MatZeroRowsColumnsLocal(mat, numNewRows, jdxm, diag, x, b));
6473: PetscCall(PetscFree(jdxm));
6474: PetscFunctionReturn(PETSC_SUCCESS);
6475: }
6477: /*@C
6478: MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6479: of a set of rows of a matrix; using local numbering of rows.
6481: Collective
6483: Input Parameters:
6484: + mat - the matrix
6485: . numRows - the number of rows to remove
6486: . rows - the local row indices
6487: . diag - value put in all diagonals of eliminated rows
6488: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6489: - b - optional vector of right hand side, that will be adjusted by provided solution
6491: Level: intermediate
6493: Notes:
6494: Before calling `MatZeroRowsLocal()`, the user must first set the
6495: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`; this is often already set for matrices obtained with `DMCreateMatrix()`.
6497: See `MatZeroRows()` for details on how this routine operates.
6499: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6500: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6501: @*/
6502: PetscErrorCode MatZeroRowsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6503: {
6504: PetscFunctionBegin;
6507: if (numRows) PetscAssertPointer(rows, 3);
6508: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6509: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6510: MatCheckPreallocated(mat, 1);
6512: if (mat->ops->zerorowslocal) {
6513: PetscUseTypeMethod(mat, zerorowslocal, numRows, rows, diag, x, b);
6514: } else {
6515: IS is, newis;
6516: const PetscInt *newRows;
6518: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6519: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6520: PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping, is, &newis));
6521: PetscCall(ISGetIndices(newis, &newRows));
6522: PetscUseTypeMethod(mat, zerorows, numRows, newRows, diag, x, b);
6523: PetscCall(ISRestoreIndices(newis, &newRows));
6524: PetscCall(ISDestroy(&newis));
6525: PetscCall(ISDestroy(&is));
6526: }
6527: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6528: PetscFunctionReturn(PETSC_SUCCESS);
6529: }
6531: /*@
6532: MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
6533: of a set of rows of a matrix; using local numbering of rows.
6535: Collective
6537: Input Parameters:
6538: + mat - the matrix
6539: . is - index set of rows to remove
6540: . diag - value put in all diagonals of eliminated rows
6541: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6542: - b - optional vector of right hand side, that will be adjusted by provided solution
6544: Level: intermediate
6546: Notes:
6547: Before calling `MatZeroRowsLocalIS()`, the user must first set the
6548: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6550: See `MatZeroRows()` for details on how this routine operates.
6552: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6553: `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6554: @*/
6555: PetscErrorCode MatZeroRowsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6556: {
6557: PetscInt numRows;
6558: const PetscInt *rows;
6560: PetscFunctionBegin;
6564: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6565: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6566: MatCheckPreallocated(mat, 1);
6568: PetscCall(ISGetLocalSize(is, &numRows));
6569: PetscCall(ISGetIndices(is, &rows));
6570: PetscCall(MatZeroRowsLocal(mat, numRows, rows, diag, x, b));
6571: PetscCall(ISRestoreIndices(is, &rows));
6572: PetscFunctionReturn(PETSC_SUCCESS);
6573: }
6575: /*@
6576: MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6577: of a set of rows and columns of a matrix; using local numbering of rows.
6579: Collective
6581: Input Parameters:
6582: + mat - the matrix
6583: . numRows - the number of rows to remove
6584: . rows - the local row indices
6585: . diag - value put in all diagonals of eliminated rows
6586: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6587: - b - optional vector of right hand side, that will be adjusted by provided solution
6589: Level: intermediate
6591: Notes:
6592: Before calling `MatZeroRowsColumnsLocal()`, the user must first set the
6593: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6595: See `MatZeroRowsColumns()` for details on how this routine operates.
6597: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6598: `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6599: @*/
6600: PetscErrorCode MatZeroRowsColumnsLocal(Mat mat, PetscInt numRows, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
6601: {
6602: IS is, newis;
6603: const PetscInt *newRows;
6605: PetscFunctionBegin;
6608: if (numRows) PetscAssertPointer(rows, 3);
6609: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6610: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6611: MatCheckPreallocated(mat, 1);
6613: PetscCheck(mat->cmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Need to provide local to global mapping to matrix first");
6614: PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numRows, rows, PETSC_COPY_VALUES, &is));
6615: PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping, is, &newis));
6616: PetscCall(ISGetIndices(newis, &newRows));
6617: PetscUseTypeMethod(mat, zerorowscolumns, numRows, newRows, diag, x, b);
6618: PetscCall(ISRestoreIndices(newis, &newRows));
6619: PetscCall(ISDestroy(&newis));
6620: PetscCall(ISDestroy(&is));
6621: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6622: PetscFunctionReturn(PETSC_SUCCESS);
6623: }
6625: /*@
6626: MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6627: of a set of rows and columns of a matrix; using local numbering of rows.
6629: Collective
6631: Input Parameters:
6632: + mat - the matrix
6633: . is - index set of rows to remove
6634: . diag - value put in all diagonals of eliminated rows
6635: . x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6636: - b - optional vector of right hand side, that will be adjusted by provided solution
6638: Level: intermediate
6640: Notes:
6641: Before calling `MatZeroRowsColumnsLocalIS()`, the user must first set the
6642: local-to-global mapping by calling `MatSetLocalToGlobalMapping()`, this is often already set for matrices obtained with `DMCreateMatrix()`.
6644: See `MatZeroRowsColumns()` for details on how this routine operates.
6646: .seealso: [](ch_matrices), `Mat`, `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6647: `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6648: @*/
6649: PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat, IS is, PetscScalar diag, Vec x, Vec b)
6650: {
6651: PetscInt numRows;
6652: const PetscInt *rows;
6654: PetscFunctionBegin;
6658: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6659: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6660: MatCheckPreallocated(mat, 1);
6662: PetscCall(ISGetLocalSize(is, &numRows));
6663: PetscCall(ISGetIndices(is, &rows));
6664: PetscCall(MatZeroRowsColumnsLocal(mat, numRows, rows, diag, x, b));
6665: PetscCall(ISRestoreIndices(is, &rows));
6666: PetscFunctionReturn(PETSC_SUCCESS);
6667: }
6669: /*@C
6670: MatGetSize - Returns the numbers of rows and columns in a matrix.
6672: Not Collective
6674: Input Parameter:
6675: . mat - the matrix
6677: Output Parameters:
6678: + m - the number of global rows
6679: - n - the number of global columns
6681: Level: beginner
6683: Note:
6684: Both output parameters can be `NULL` on input.
6686: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetLocalSize()`
6687: @*/
6688: PetscErrorCode MatGetSize(Mat mat, PetscInt *m, PetscInt *n)
6689: {
6690: PetscFunctionBegin;
6692: if (m) *m = mat->rmap->N;
6693: if (n) *n = mat->cmap->N;
6694: PetscFunctionReturn(PETSC_SUCCESS);
6695: }
6697: /*@C
6698: MatGetLocalSize - For most matrix formats, excluding `MATELEMENTAL` and `MATSCALAPACK`, returns the number of local rows and local columns
6699: of a matrix. For all matrices this is the local size of the left and right vectors as returned by `MatCreateVecs()`.
6701: Not Collective
6703: Input Parameter:
6704: . mat - the matrix
6706: Output Parameters:
6707: + m - the number of local rows, use `NULL` to not obtain this value
6708: - n - the number of local columns, use `NULL` to not obtain this value
6710: Level: beginner
6712: .seealso: [](ch_matrices), `Mat`, `MatSetSizes()`, `MatGetSize()`
6713: @*/
6714: PetscErrorCode MatGetLocalSize(Mat mat, PetscInt *m, PetscInt *n)
6715: {
6716: PetscFunctionBegin;
6718: if (m) PetscAssertPointer(m, 2);
6719: if (n) PetscAssertPointer(n, 3);
6720: if (m) *m = mat->rmap->n;
6721: if (n) *n = mat->cmap->n;
6722: PetscFunctionReturn(PETSC_SUCCESS);
6723: }
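/*
   Illustrative usage sketch (not part of the PETSc sources): querying global and local sizes with
   MatGetSize() and MatGetLocalSize() above. The helper name is hypothetical.
*/
static inline PetscErrorCode MatReportSizes_Sketch(Mat A)
{
  PetscInt M, N, m, n;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A, &M, &N));      /* global numbers of rows and columns */
  PetscCall(MatGetLocalSize(A, &m, &n)); /* local sizes of the left/right vectors returned by MatCreateVecs() */
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "global %" PetscInt_FMT " x %" PetscInt_FMT ", local %" PetscInt_FMT " x %" PetscInt_FMT "\n", M, N, m, n));
  PetscFunctionReturn(PETSC_SUCCESS);
}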
6725: /*@C
6726: MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with the entries of a
6727: vector one multiplies this matrix by that are owned by this MPI process.
6729: Not Collective, unless matrix has not been allocated, then collective
6731: Input Parameter:
6732: . mat - the matrix
6734: Output Parameters:
6735: + m - the global index of the first local column, use `NULL` to not obtain this value
6736: - n - one more than the global index of the last local column, use `NULL` to not obtain this value
6738: Level: developer
6740: Note:
6741: Returns the columns of the "diagonal block" for most sparse matrix formats. See [Matrix
6742: Layouts](sec_matlayout) for details on matrix layouts.
6744: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6745: @*/
6746: PetscErrorCode MatGetOwnershipRangeColumn(Mat mat, PetscInt *m, PetscInt *n)
6747: {
6748: PetscFunctionBegin;
6751: if (m) PetscAssertPointer(m, 2);
6752: if (n) PetscAssertPointer(n, 3);
6753: MatCheckPreallocated(mat, 1);
6754: if (m) *m = mat->cmap->rstart;
6755: if (n) *n = mat->cmap->rend;
6756: PetscFunctionReturn(PETSC_SUCCESS);
6757: }
6759: /*@C
6760: MatGetOwnershipRange - For matrices that own values by row, excludes `MATELEMENTAL` and `MATSCALAPACK`, returns the range of matrix rows owned by
6761: this MPI process.
6763: Not Collective
6765: Input Parameter:
6766: . mat - the matrix
6768: Output Parameters:
6769: + m - the global index of the first local row, use `NULL` to not obtain this value
6770: - n - one more than the global index of the last local row, use `NULL` to not obtain this value
6772: Level: beginner
6774: Note:
6775: For all matrices it returns the range of matrix rows associated with rows of a vector that
6776: would contain the result of a matrix vector product with this matrix. See [Matrix
6777: Layouts](sec_matlayout) for details on matrix layouts.
6779: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`,
6780: `PetscLayout`
6781: @*/
6782: PetscErrorCode MatGetOwnershipRange(Mat mat, PetscInt *m, PetscInt *n)
6783: {
6784: PetscFunctionBegin;
6787: if (m) PetscAssertPointer(m, 2);
6788: if (n) PetscAssertPointer(n, 3);
6789: MatCheckPreallocated(mat, 1);
6790: if (m) *m = mat->rmap->rstart;
6791: if (n) *n = mat->rmap->rend;
6792: PetscFunctionReturn(PETSC_SUCCESS);
6793: }
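/*
   Illustrative usage sketch (not part of the PETSc sources): the classic pattern of looping over the
   locally owned rows returned by MatGetOwnershipRange() above, here to set the diagonal to 1.0.
   The helper name is hypothetical; A is assumed to be square with a conventional row distribution.
*/
static inline PetscErrorCode MatSetUnitDiagonalOnOwnedRows_Sketch(Mat A)
{
  PetscInt rstart, rend, i;

  PetscFunctionBegin;
  PetscCall(MatGetOwnershipRange(A, &rstart, &rend)); /* this process owns rows [rstart, rend) */
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(A, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}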
6795: /*@C
6796: MatGetOwnershipRanges - For matrices that own values by row, excludes `MATELEMENTAL` and
6797: `MATSCALAPACK`, returns the range of matrix rows owned by each process.
6799: Not Collective, unless matrix has not been allocated
6801: Input Parameter:
6802: . mat - the matrix
6804: Output Parameter:
6805: . ranges - start of each processors portion plus one more than the total length at the end
6807: Level: beginner
6809: Note:
6810: For all matrices it returns the ranges of matrix rows associated with rows of a vector that
6811: would contain the result of a matrix vector product with this matrix. See [Matrix
6812: Layouts](sec_matlayout) for details on matrix layouts.
6814: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscLayout`
6815: @*/
6816: PetscErrorCode MatGetOwnershipRanges(Mat mat, const PetscInt **ranges)
6817: {
6818: PetscFunctionBegin;
6821: MatCheckPreallocated(mat, 1);
6822: PetscCall(PetscLayoutGetRanges(mat->rmap, ranges));
6823: PetscFunctionReturn(PETSC_SUCCESS);
6824: }
6826: /*@C
6827: MatGetOwnershipRangesColumn - Returns the ranges of matrix columns associated with the entries of a
6828: vector one multiplies this matrix by that are owned by each MPI process.
6830: Not Collective, unless matrix has not been allocated
6832: Input Parameter:
6833: . mat - the matrix
6835: Output Parameter:
6836: . ranges - start of each processors portion plus one more than the total length at the end
6838: Level: beginner
6840: Note:
6841: Returns the columns of the "diagonal blocks", for most sparse matrix formats. See [Matrix
6842: Layouts](sec_matlayout) for details on matrix layouts.
6844: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`
6845: @*/
6846: PetscErrorCode MatGetOwnershipRangesColumn(Mat mat, const PetscInt **ranges)
6847: {
6848: PetscFunctionBegin;
6851: MatCheckPreallocated(mat, 1);
6852: PetscCall(PetscLayoutGetRanges(mat->cmap, ranges));
6853: PetscFunctionReturn(PETSC_SUCCESS);
6854: }
6856: /*@C
6857: MatGetOwnershipIS - Get row and column ownership of a matrix's values as index sets.
6859: Not Collective
6861: Input Parameter:
6862: . A - matrix
6864: Output Parameters:
6865: + rows - rows in which this process owns elements, use `NULL` to not obtain this value
6866: - cols - columns in which this process owns elements, use `NULL` to not obtain this value
6868: Level: intermediate
6870: Note:
6871: For most matrices, excluding `MATELEMENTAL` and `MATSCALAPACK`, this corresponds to values
6872: returned by `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`. For `MATELEMENTAL` and
6873: `MATSCALAPACK` the ownership is more complicated. See [Matrix Layouts](sec_matlayout) for
6874: details on matrix layouts.
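Example Usage:
A minimal sketch, assuming `A` is assembled and the returned index sets are destroyed by the caller.
.vb
IS rows, cols;

PetscCall(MatGetOwnershipIS(A, &rows, &cols));
PetscCall(ISView(rows, PETSC_VIEWER_STDOUT_SELF));
PetscCall(ISView(cols, PETSC_VIEWER_STDOUT_SELF));
PetscCall(ISDestroy(&rows));
PetscCall(ISDestroy(&cols));
.ve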
6876: .seealso: [](ch_matrices), `Mat`, `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
6877: @*/
6878: PetscErrorCode MatGetOwnershipIS(Mat A, IS *rows, IS *cols)
6879: {
6880: PetscErrorCode (*f)(Mat, IS *, IS *);
6882: PetscFunctionBegin;
6883: MatCheckPreallocated(A, 1);
6884: PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatGetOwnershipIS_C", &f));
6885: if (f) {
6886: PetscCall((*f)(A, rows, cols));
6887: } else { /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6888: if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->rmap->n, A->rmap->rstart, 1, rows));
6889: if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF, A->cmap->N, 0, 1, cols));
6890: }
6891: PetscFunctionReturn(PETSC_SUCCESS);
6892: }
6894: /*@C
6895: MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix obtained with `MatGetFactor()`.
6896: Uses levels of fill only, not drop tolerance. Use `MatLUFactorNumeric()`
6897: to complete the factorization.
6899: Collective
6901: Input Parameters:
6902: + fact - the factorized matrix obtained with `MatGetFactor()`
6903: . mat - the matrix
6904: . row - row permutation
6905: . col - column permutation
6906: - info - structure containing
6907: .vb
6908: levels - number of levels of fill.
6909: expected fill - as ratio of original fill.
6910: 1 or 0 - indicating force fill on diagonal (improves robustness for matrices
6911: missing diagonal entries)
6912: .ve
6914: Level: developer
6916: Notes:
6917: See [Matrix Factorization](sec_matfactor) for additional information.
6919: Most users should employ the `KSP` interface for linear solvers
6920: instead of working directly with matrix algebra routines such as this.
6921: See, e.g., `KSPCreate()`.
6923: Uses the definition of level of fill as in Y. Saad, {cite}`saad2003`
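Example Usage:
A minimal sketch of the symbolic/numeric sequence, assuming `mat` is an assembled `MATSEQAIJ` matrix.
.vb
Mat           fact;
IS            row, col;
MatFactorInfo info;

PetscCall(MatGetOrdering(mat, MATORDERINGNATURAL, &row, &col));
PetscCall(MatGetFactor(mat, MATSOLVERPETSC, MAT_FACTOR_ILU, &fact));
PetscCall(MatFactorInfoInitialize(&info));
info.levels = 1;   // ILU(1)
info.fill   = 2.0; // expected fill, as a ratio of the original number of nonzeros
PetscCall(MatILUFactorSymbolic(fact, mat, row, col, &info));
PetscCall(MatLUFactorNumeric(fact, mat, &info));
// fact may now be used with MatSolve(); row and col should eventually be destroyed with ISDestroy()
.ve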
6925: Developer Note:
6926: The Fortran interface is not autogenerated as the
6927: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6929: .seealso: [](ch_matrices), `Mat`, [Matrix Factorization](sec_matfactor), `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
6930: `MatGetOrdering()`, `MatFactorInfo`
6931: @*/
6932: PetscErrorCode MatILUFactorSymbolic(Mat fact, Mat mat, IS row, IS col, const MatFactorInfo *info)
6933: {
6934: PetscFunctionBegin;
6939: PetscAssertPointer(info, 5);
6940: PetscAssertPointer(fact, 1);
6941: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels of fill negative %" PetscInt_FMT, (PetscInt)info->levels);
6942: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
6943: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6944: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6945: MatCheckPreallocated(mat, 2);
6947: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic, mat, row, col, 0));
6948: PetscUseTypeMethod(fact, ilufactorsymbolic, mat, row, col, info);
6949: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic, mat, row, col, 0));
6950: PetscFunctionReturn(PETSC_SUCCESS);
6951: }
6953: /*@C
6954: MatICCFactorSymbolic - Performs symbolic incomplete
6955: Cholesky factorization for a symmetric matrix. Use
6956: `MatCholeskyFactorNumeric()` to complete the factorization.
6958: Collective
6960: Input Parameters:
6961: + fact - the factorized matrix obtained with `MatGetFactor()`
6962: . mat - the matrix to be factored
6963: . perm - row and column permutation
6964: - info - structure containing
6965: .vb
6966: levels - number of levels of fill.
6967: expected fill - as ratio of original fill.
6968: .ve
6970: Level: developer
6972: Notes:
6973: Most users should employ the `KSP` interface for linear solvers
6974: instead of working directly with matrix algebra routines such as this.
6975: See, e.g., `KSPCreate()`.
6977: This uses the definition of level of fill as in Y. Saad {cite}`saad2003`
6979: Developer Note:
6980: The Fortran interface is not autogenerated as the
6981: interface definition cannot be generated correctly [due to `MatFactorInfo`]
6983: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`
6984: @*/
6985: PetscErrorCode MatICCFactorSymbolic(Mat fact, Mat mat, IS perm, const MatFactorInfo *info)
6986: {
6987: PetscFunctionBegin;
6991: PetscAssertPointer(info, 4);
6992: PetscAssertPointer(fact, 1);
6993: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
6994: PetscCheck(info->levels >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Levels negative %" PetscInt_FMT, (PetscInt)info->levels);
6995: PetscCheck(info->fill >= 1.0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Expected fill less than 1.0 %g", (double)info->fill);
6996: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
6997: MatCheckPreallocated(mat, 2);
6999: if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7000: PetscUseTypeMethod(fact, iccfactorsymbolic, mat, perm, info);
7001: if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic, mat, perm, 0, 0));
7002: PetscFunctionReturn(PETSC_SUCCESS);
7003: }
7005: /*@C
7006: MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
7007: points to an array of valid matrices, they may be reused to store the new
7008: submatrices.
7010: Collective
7012: Input Parameters:
7013: + mat - the matrix
7014: . n - the number of submatrices to be extracted (on this processor, may be zero)
7015: . irow - index set of rows to extract
7016: . icol - index set of columns to extract
7017: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7019: Output Parameter:
7020: . submat - the array of submatrices
7022: Level: advanced
7024: Notes:
7025: `MatCreateSubMatrices()` can extract ONLY sequential submatrices
7026: (from both sequential and parallel matrices). Use `MatCreateSubMatrix()`
7027: to extract a parallel submatrix.
7029: Some matrix types place restrictions on the row and column
7030: indices, such as that they be sorted or that they be equal to each other.
7032: The index sets may not have duplicate entries.
7034: When extracting submatrices from a parallel matrix, each processor can
7035: form a different submatrix by setting the rows and columns of its
7036: individual index sets according to the local submatrix desired.
7038: When finished using the submatrices, the user should destroy
7039: them with `MatDestroySubMatrices()`.
7041: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
7042: original matrix has not changed since the last call to `MatCreateSubMatrices()`.
7044: This routine creates the matrices in submat; you should NOT create them before
7045: calling it. It also allocates the array of matrix pointers submat.
7047: For `MATBAIJ` matrices the index sets must respect the block structure, that is if they
7048: request one row/column in a block, they must request all rows/columns that are in
7049: that block. For example, if the block size is 2 you cannot request just row 0 and
7050: column 0.
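Example Usage:
A minimal sketch in which each process extracts one sequential submatrix consisting of its locally owned rows and all columns; `mat` is assumed to be assembled.
.vb
IS       rowis, colis;
Mat     *submats;
PetscInt rstart, rend, N;

PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
PetscCall(MatGetSize(mat, NULL, &N));
PetscCall(ISCreateStride(PETSC_COMM_SELF, rend - rstart, rstart, 1, &rowis));
PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &colis));
PetscCall(MatCreateSubMatrices(mat, 1, &rowis, &colis, MAT_INITIAL_MATRIX, &submats));
// submats[0] is a sequential matrix holding this process's rows
PetscCall(MatDestroySubMatrices(1, &submats));
PetscCall(ISDestroy(&rowis));
PetscCall(ISDestroy(&colis));
.ve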
7052: Fortran Note:
7053: The Fortran interface is slightly different from that given below; it
7054: requires one to pass in as `submat` a `Mat` (integer) array of size at least n+1.
7056: .seealso: [](ch_matrices), `Mat`, `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7057: @*/
7058: PetscErrorCode MatCreateSubMatrices(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7059: {
7060: PetscInt i;
7061: PetscBool eq;
7063: PetscFunctionBegin;
7066: if (n) {
7067: PetscAssertPointer(irow, 3);
7069: PetscAssertPointer(icol, 4);
7071: }
7072: PetscAssertPointer(submat, 6);
7073: if (n && scall == MAT_REUSE_MATRIX) {
7074: PetscAssertPointer(*submat, 6);
7076: }
7077: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7078: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7079: MatCheckPreallocated(mat, 1);
7080: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7081: PetscUseTypeMethod(mat, createsubmatrices, n, irow, icol, scall, submat);
7082: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7083: for (i = 0; i < n; i++) {
7084: (*submat)[i]->factortype = MAT_FACTOR_NONE; /* in case in place factorization was previously done on submatrix */
7085: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7086: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7087: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
7088: if (mat->boundtocpu && mat->bindingpropagates) {
7089: PetscCall(MatBindToCPU((*submat)[i], PETSC_TRUE));
7090: PetscCall(MatSetBindingPropagates((*submat)[i], PETSC_TRUE));
7091: }
7092: #endif
7093: }
7094: PetscFunctionReturn(PETSC_SUCCESS);
7095: }
7097: /*@C
7098: MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of `IS` that may live on subcomms).
7100: Collective
7102: Input Parameters:
7103: + mat - the matrix
7104: . n - the number of submatrices to be extracted
7105: . irow - index set of rows to extract
7106: . icol - index set of columns to extract
7107: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
7109: Output Parameter:
7110: . submat - the array of submatrices
7112: Level: advanced
7114: Note:
7115: This is used by `PCGASM`
7117: .seealso: [](ch_matrices), `Mat`, `PCGASM`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
7118: @*/
7119: PetscErrorCode MatCreateSubMatricesMPI(Mat mat, PetscInt n, const IS irow[], const IS icol[], MatReuse scall, Mat *submat[])
7120: {
7121: PetscInt i;
7122: PetscBool eq;
7124: PetscFunctionBegin;
7127: if (n) {
7128: PetscAssertPointer(irow, 3);
7130: PetscAssertPointer(icol, 4);
7132: }
7133: PetscAssertPointer(submat, 6);
7134: if (n && scall == MAT_REUSE_MATRIX) {
7135: PetscAssertPointer(*submat, 6);
7137: }
7138: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7139: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7140: MatCheckPreallocated(mat, 1);
7142: PetscCall(PetscLogEventBegin(MAT_CreateSubMats, mat, 0, 0, 0));
7143: PetscUseTypeMethod(mat, createsubmatricesmpi, n, irow, icol, scall, submat);
7144: PetscCall(PetscLogEventEnd(MAT_CreateSubMats, mat, 0, 0, 0));
7145: for (i = 0; i < n; i++) {
7146: PetscCall(ISEqualUnsorted(irow[i], icol[i], &eq));
7147: if (eq) PetscCall(MatPropagateSymmetryOptions(mat, (*submat)[i]));
7148: }
7149: PetscFunctionReturn(PETSC_SUCCESS);
7150: }
7152: /*@C
7153: MatDestroyMatrices - Destroys an array of matrices.
7155: Collective
7157: Input Parameters:
7158: + n - the number of local matrices
7159: - mat - the matrices (this is a pointer to the array of matrices)
7161: Level: advanced
7163: Note:
7164: Frees not only the matrices, but also the array that contains the matrices
7166: Fortran Note:
7167: This does not free the array.
7169: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroySubMatrices()`
7170: @*/
7171: PetscErrorCode MatDestroyMatrices(PetscInt n, Mat *mat[])
7172: {
7173: PetscInt i;
7175: PetscFunctionBegin;
7176: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7177: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7178: PetscAssertPointer(mat, 2);
7180: for (i = 0; i < n; i++) PetscCall(MatDestroy(&(*mat)[i]));
7182: /* memory is allocated even if n = 0 */
7183: PetscCall(PetscFree(*mat));
7184: PetscFunctionReturn(PETSC_SUCCESS);
7185: }
7187: /*@C
7188: MatDestroySubMatrices - Destroys a set of matrices obtained with `MatCreateSubMatrices()`.
7190: Collective
7192: Input Parameters:
7193: + n - the number of local matrices
7194: - mat - the matrices (this is a pointer to the array of matrices, just to match the calling
7195: sequence of `MatCreateSubMatrices()`)
7197: Level: advanced
7199: Note:
7200: Frees not only the matrices, but also the array that contains the matrices
7202: Fortran Note:
7203: This does not free the array.
7205: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7206: @*/
7207: PetscErrorCode MatDestroySubMatrices(PetscInt n, Mat *mat[])
7208: {
7209: Mat mat0;
7211: PetscFunctionBegin;
7212: if (!*mat) PetscFunctionReturn(PETSC_SUCCESS);
7213: /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7214: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Trying to destroy negative number of matrices %" PetscInt_FMT, n);
7215: PetscAssertPointer(mat, 2);
7217: mat0 = (*mat)[0];
7218: if (mat0 && mat0->ops->destroysubmatrices) {
7219: PetscCall((*mat0->ops->destroysubmatrices)(n, mat));
7220: } else {
7221: PetscCall(MatDestroyMatrices(n, mat));
7222: }
7223: PetscFunctionReturn(PETSC_SUCCESS);
7224: }
7226: /*@C
7227: MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process
7229: Collective
7231: Input Parameter:
7232: . mat - the matrix
7234: Output Parameter:
7235: . matstruct - the sequential matrix with the nonzero structure of mat
7237: Level: developer
7239: .seealso: [](ch_matrices), `Mat`, `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
7240: @*/
7241: PetscErrorCode MatGetSeqNonzeroStructure(Mat mat, Mat *matstruct)
7242: {
7243: PetscFunctionBegin;
7245: PetscAssertPointer(matstruct, 2);
7248: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7249: MatCheckPreallocated(mat, 1);
7251: PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7252: PetscUseTypeMethod(mat, getseqnonzerostructure, matstruct);
7253: PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure, mat, 0, 0, 0));
7254: PetscFunctionReturn(PETSC_SUCCESS);
7255: }
7257: /*@C
7258: MatDestroySeqNonzeroStructure - Destroys matrix obtained with `MatGetSeqNonzeroStructure()`.
7260: Collective
7262: Input Parameter:
7263: . mat - the matrix (this is a pointer to the matrix, just to match the calling
7264: sequence of `MatGetSeqNonzeroStructure()`)
7266: Level: advanced
7268: Note:
7269: Frees the sequential matrix obtained with `MatGetSeqNonzeroStructure()`
7271: .seealso: [](ch_matrices), `Mat`, `MatGetSeqNonzeroStructure()`
7272: @*/
7273: PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
7274: {
7275: PetscFunctionBegin;
7276: PetscAssertPointer(mat, 1);
7277: PetscCall(MatDestroy(mat));
7278: PetscFunctionReturn(PETSC_SUCCESS);
7279: }
7281: /*@
7282: MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7283: replaces the index sets by larger ones that represent submatrices with
7284: additional overlap.
7286: Collective
7288: Input Parameters:
7289: + mat - the matrix
7290: . n - the number of index sets
7291: . is - the array of index sets (these index sets will be changed during the call)
7292: - ov - the additional overlap requested
7294: Options Database Key:
7295: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7297: Level: developer
7299: Note:
7300: The computed overlap preserves the matrix block sizes when the blocks are square.
7301: That is, if a matrix nonzero for a given block would increase the overlap, all columns associated with
7302: that block are included in the overlap, regardless of whether each specific column would increase the overlap.
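Example Usage:
A minimal sketch, assuming `mat` is assembled and `is` is an index set describing one local subdomain (for example, built with `ISCreateStride()`).
.vb
// grow the subdomain by one level of overlap in the nonzero structure of mat
PetscCall(MatIncreaseOverlap(mat, 1, &is, 1));
// the enlarged index set can then be passed to MatCreateSubMatrices() to form an overlapping subdomain matrix
.ve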
7304: .seealso: [](ch_matrices), `Mat`, `PCASM`, `MatSetBlockSize()`, `MatIncreaseOverlapSplit()`, `MatCreateSubMatrices()`
7305: @*/
7306: PetscErrorCode MatIncreaseOverlap(Mat mat, PetscInt n, IS is[], PetscInt ov)
7307: {
7308: PetscInt i, bs, cbs;
7310: PetscFunctionBegin;
7314: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7315: if (n) {
7316: PetscAssertPointer(is, 3);
7318: }
7319: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7320: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7321: MatCheckPreallocated(mat, 1);
7323: if (!ov || !n) PetscFunctionReturn(PETSC_SUCCESS);
7324: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7325: PetscUseTypeMethod(mat, increaseoverlap, n, is, ov);
7326: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7327: PetscCall(MatGetBlockSizes(mat, &bs, &cbs));
7328: if (bs == cbs) {
7329: for (i = 0; i < n; i++) PetscCall(ISSetBlockSize(is[i], bs));
7330: }
7331: PetscFunctionReturn(PETSC_SUCCESS);
7332: }
7334: PetscErrorCode MatIncreaseOverlapSplit_Single(Mat, IS *, PetscInt);
7336: /*@
7337: MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7338: a sub communicator, replaces the index sets by larger ones that represent submatrices with
7339: additional overlap.
7341: Collective
7343: Input Parameters:
7344: + mat - the matrix
7345: . n - the number of index sets
7346: . is - the array of index sets (these index sets will be changed during the call)
7347: - ov - the additional overlap requested
7349: Options Database Key:
7350: . -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7352: Level: developer
7354: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatIncreaseOverlap()`
7355: @*/
7356: PetscErrorCode MatIncreaseOverlapSplit(Mat mat, PetscInt n, IS is[], PetscInt ov)
7357: {
7358: PetscInt i;
7360: PetscFunctionBegin;
7363: PetscCheck(n >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Must have one or more domains, you have %" PetscInt_FMT, n);
7364: if (n) {
7365: PetscAssertPointer(is, 3);
7367: }
7368: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
7369: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
7370: MatCheckPreallocated(mat, 1);
7371: if (!ov) PetscFunctionReturn(PETSC_SUCCESS);
7372: PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap, mat, 0, 0, 0));
7373: for (i = 0; i < n; i++) PetscCall(MatIncreaseOverlapSplit_Single(mat, &is[i], ov));
7374: PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap, mat, 0, 0, 0));
7375: PetscFunctionReturn(PETSC_SUCCESS);
7376: }
7378: /*@
7379: MatGetBlockSize - Returns the matrix block size.
7381: Not Collective
7383: Input Parameter:
7384: . mat - the matrix
7386: Output Parameter:
7387: . bs - block size
7389: Level: intermediate
7391: Notes:
7392: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7394: If the block size has not been set yet this routine returns 1.
7396: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7397: @*/
7398: PetscErrorCode MatGetBlockSize(Mat mat, PetscInt *bs)
7399: {
7400: PetscFunctionBegin;
7402: PetscAssertPointer(bs, 2);
7403: *bs = PetscAbs(mat->rmap->bs);
7404: PetscFunctionReturn(PETSC_SUCCESS);
7405: }
7407: /*@
7408: MatGetBlockSizes - Returns the matrix block row and column sizes.
7410: Not Collective
7412: Input Parameter:
7413: . mat - the matrix
7415: Output Parameters:
7416: + rbs - row block size
7417: - cbs - column block size
7419: Level: intermediate
7421: Notes:
7422: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7423: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7425: If a block size has not been set yet this routine returns 1.
7427: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7428: @*/
7429: PetscErrorCode MatGetBlockSizes(Mat mat, PetscInt *rbs, PetscInt *cbs)
7430: {
7431: PetscFunctionBegin;
7433: if (rbs) PetscAssertPointer(rbs, 2);
7434: if (cbs) PetscAssertPointer(cbs, 3);
7435: if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7436: if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7437: PetscFunctionReturn(PETSC_SUCCESS);
7438: }
7440: /*@
7441: MatSetBlockSize - Sets the matrix block size.
7443: Logically Collective
7445: Input Parameters:
7446: + mat - the matrix
7447: - bs - block size
7449: Level: intermediate
7451: Notes:
7452: The block row formats `MATBAIJ` and `MATSBAIJ` ALWAYS have square block storage in the matrix.
7453: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (otherwise the block size defaults to 1), and the block size cannot be changed later.
7455: For `MATAIJ` matrix format, this function can be called at a later stage, provided that the specified block size
7456: is compatible with the matrix local sizes.
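Example Usage:
A minimal sketch of creating a `MATBAIJ` matrix with square 3x3 blocks; the sizes are illustrative.
.vb
Mat mat;

PetscCall(MatCreate(PETSC_COMM_WORLD, &mat));
PetscCall(MatSetSizes(mat, PETSC_DECIDE, PETSC_DECIDE, 30, 30));
PetscCall(MatSetType(mat, MATBAIJ));
PetscCall(MatSetBlockSize(mat, 3)); // set before MatSetUp()/preallocation
PetscCall(MatSetUp(mat));
.ve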
7458: .seealso: [](ch_matrices), `Mat`, `MATBAIJ`, `MATSBAIJ`, `MATAIJ`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7459: @*/
7460: PetscErrorCode MatSetBlockSize(Mat mat, PetscInt bs)
7461: {
7462: PetscFunctionBegin;
7465: PetscCall(MatSetBlockSizes(mat, bs, bs));
7466: PetscFunctionReturn(PETSC_SUCCESS);
7467: }
7469: typedef struct {
7470: PetscInt n;
7471: IS *is;
7472: Mat *mat;
7473: PetscObjectState nonzerostate;
7474: Mat C;
7475: } EnvelopeData;
7477: static PetscErrorCode EnvelopeDataDestroy(EnvelopeData *edata)
7478: {
7479: for (PetscInt i = 0; i < edata->n; i++) PetscCall(ISDestroy(&edata->is[i]));
7480: PetscCall(PetscFree(edata->is));
7481: PetscCall(PetscFree(edata));
7482: return PETSC_SUCCESS;
7483: }
7485: /*@
7486: MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal, this computes and stores
7487: the sizes of these blocks in the matrix. An individual block may lie over several processes.
7489: Collective
7491: Input Parameter:
7492: . mat - the matrix
7494: Level: intermediate
7496: Notes:
7497: There can be zeros within the blocks
7499: The blocks can overlap between processes, including lying across more than two processes
7501: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatSetVariableBlockSizes()`
7502: @*/
7503: PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
7504: {
7505: PetscInt n, *sizes, *starts, i = 0, env = 0, tbs = 0, lblocks = 0, rstart, II, ln = 0, cnt = 0, cstart, cend;
7506: PetscInt *diag, *odiag, sc;
7507: VecScatter scatter;
7508: PetscScalar *seqv;
7509: const PetscScalar *parv;
7510: const PetscInt *ia, *ja;
7511: PetscBool set, flag, done;
7512: Mat AA = mat, A;
7513: MPI_Comm comm;
7514: PetscMPIInt rank, size, tag;
7515: MPI_Status status;
7516: PetscContainer container;
7517: EnvelopeData *edata;
7518: Vec seq, par;
7519: IS isglobal;
7521: PetscFunctionBegin;
7523: PetscCall(MatIsSymmetricKnown(mat, &set, &flag));
7524: if (!set || !flag) {
7525: /* TODO: only needs nonzero structure of transpose */
7526: PetscCall(MatTranspose(mat, MAT_INITIAL_MATRIX, &AA));
7527: PetscCall(MatAXPY(AA, 1.0, mat, DIFFERENT_NONZERO_PATTERN));
7528: }
7529: PetscCall(MatAIJGetLocalMat(AA, &A));
7530: PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7531: PetscCheck(done, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Unable to get IJ structure from matrix");
7533: PetscCall(MatGetLocalSize(mat, &n, NULL));
7534: PetscCall(PetscObjectGetNewTag((PetscObject)mat, &tag));
7535: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
7536: PetscCallMPI(MPI_Comm_size(comm, &size));
7537: PetscCallMPI(MPI_Comm_rank(comm, &rank));
7539: PetscCall(PetscMalloc2(n, &sizes, n, &starts));
7541: if (rank > 0) {
7542: PetscCallMPI(MPI_Recv(&env, 1, MPIU_INT, rank - 1, tag, comm, &status));
7543: PetscCallMPI(MPI_Recv(&tbs, 1, MPIU_INT, rank - 1, tag, comm, &status));
7544: }
7545: PetscCall(MatGetOwnershipRange(mat, &rstart, NULL));
7546: for (i = 0; i < n; i++) {
7547: env = PetscMax(env, ja[ia[i + 1] - 1]);
7548: II = rstart + i;
7549: if (env == II) {
7550: starts[lblocks] = tbs;
7551: sizes[lblocks++] = 1 + II - tbs;
7552: tbs = 1 + II;
7553: }
7554: }
7555: if (rank < size - 1) {
7556: PetscCallMPI(MPI_Send(&env, 1, MPIU_INT, rank + 1, tag, comm));
7557: PetscCallMPI(MPI_Send(&tbs, 1, MPIU_INT, rank + 1, tag, comm));
7558: }
7560: PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
7561: if (!set || !flag) PetscCall(MatDestroy(&AA));
7562: PetscCall(MatDestroy(&A));
7564: PetscCall(PetscNew(&edata));
7565: PetscCall(MatGetNonzeroState(mat, &edata->nonzerostate));
7566: edata->n = lblocks;
7567: /* create IS needed for extracting blocks from the original matrix */
7568: PetscCall(PetscMalloc1(lblocks, &edata->is));
7569: for (PetscInt i = 0; i < lblocks; i++) PetscCall(ISCreateStride(PETSC_COMM_SELF, sizes[i], starts[i], 1, &edata->is[i]));
7571: /* Create the resulting inverse matrix structure with preallocation information */
7572: PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &edata->C));
7573: PetscCall(MatSetSizes(edata->C, mat->rmap->n, mat->cmap->n, mat->rmap->N, mat->cmap->N));
7574: PetscCall(MatSetBlockSizesFromMats(edata->C, mat, mat));
7575: PetscCall(MatSetType(edata->C, MATAIJ));
7577: /* Communicate the start and end of each row, from each block to the correct rank */
7578: /* TODO: Use PetscSF instead of VecScatter */
7579: for (PetscInt i = 0; i < lblocks; i++) ln += sizes[i];
7580: PetscCall(VecCreateSeq(PETSC_COMM_SELF, 2 * ln, &seq));
7581: PetscCall(VecGetArrayWrite(seq, &seqv));
7582: for (PetscInt i = 0; i < lblocks; i++) {
7583: for (PetscInt j = 0; j < sizes[i]; j++) {
7584: seqv[cnt] = starts[i];
7585: seqv[cnt + 1] = starts[i] + sizes[i];
7586: cnt += 2;
7587: }
7588: }
7589: PetscCall(VecRestoreArrayWrite(seq, &seqv));
7590: PetscCallMPI(MPI_Scan(&cnt, &sc, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)mat)));
7591: sc -= cnt;
7592: PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat), 2 * mat->rmap->n, 2 * mat->rmap->N, &par));
7593: PetscCall(ISCreateStride(PETSC_COMM_SELF, cnt, sc, 1, &isglobal));
7594: PetscCall(VecScatterCreate(seq, NULL, par, isglobal, &scatter));
7595: PetscCall(ISDestroy(&isglobal));
7596: PetscCall(VecScatterBegin(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7597: PetscCall(VecScatterEnd(scatter, seq, par, INSERT_VALUES, SCATTER_FORWARD));
7598: PetscCall(VecScatterDestroy(&scatter));
7599: PetscCall(VecDestroy(&seq));
7600: PetscCall(MatGetOwnershipRangeColumn(mat, &cstart, &cend));
7601: PetscCall(PetscMalloc2(mat->rmap->n, &diag, mat->rmap->n, &odiag));
7602: PetscCall(VecGetArrayRead(par, &parv));
7603: cnt = 0;
7604: PetscCall(MatGetSize(mat, NULL, &n));
7605: for (PetscInt i = 0; i < mat->rmap->n; i++) {
7606: PetscInt start, end, d = 0, od = 0;
7608: start = (PetscInt)PetscRealPart(parv[cnt]);
7609: end = (PetscInt)PetscRealPart(parv[cnt + 1]);
7610: cnt += 2;
7612: if (start < cstart) {
7613: od += cstart - start + n - cend;
7614: d += cend - cstart;
7615: } else if (start < cend) {
7616: od += n - cend;
7617: d += cend - start;
7618: } else od += n - start;
7619: if (end <= cstart) {
7620: od -= cstart - end + n - cend;
7621: d -= cend - cstart;
7622: } else if (end < cend) {
7623: od -= n - cend;
7624: d -= cend - end;
7625: } else od -= n - end;
7627: odiag[i] = od;
7628: diag[i] = d;
7629: }
7630: PetscCall(VecRestoreArrayRead(par, &parv));
7631: PetscCall(VecDestroy(&par));
7632: PetscCall(MatXAIJSetPreallocation(edata->C, mat->rmap->bs, diag, odiag, NULL, NULL));
7633: PetscCall(PetscFree2(diag, odiag));
7634: PetscCall(PetscFree2(sizes, starts));
7636: PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &container));
7637: PetscCall(PetscContainerSetPointer(container, edata));
7638: PetscCall(PetscContainerSetUserDestroy(container, (PetscErrorCode(*)(void *))EnvelopeDataDestroy));
7639: PetscCall(PetscObjectCompose((PetscObject)mat, "EnvelopeData", (PetscObject)container));
7640: PetscCall(PetscObjectDereference((PetscObject)container));
7641: PetscFunctionReturn(PETSC_SUCCESS);
7642: }
7644: /*@
7645: MatInvertVariableBlockEnvelope - Sets matrix `C` to be the inverted block diagonal of matrix `A`
7647: Collective
7649: Input Parameters:
7650: + A - the matrix
7651: - reuse - indicates if the `C` matrix was obtained from a previous call to this routine
7653: Output Parameter:
7654: . C - matrix with inverted block diagonal of `A`
7656: Level: advanced
7658: Note:
7659: For efficiency the matrix `A` should have all the nonzero entries clustered in smallish blocks along the diagonal.
7661: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatComputeBlockDiagonal()`
7662: @*/
7663: PetscErrorCode MatInvertVariableBlockEnvelope(Mat A, MatReuse reuse, Mat *C)
7664: {
7665: PetscContainer container;
7666: EnvelopeData *edata;
7667: PetscObjectState nonzerostate;
7669: PetscFunctionBegin;
7670: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7671: if (!container) {
7672: PetscCall(MatComputeVariableBlockEnvelope(A));
7673: PetscCall(PetscObjectQuery((PetscObject)A, "EnvelopeData", (PetscObject *)&container));
7674: }
7675: PetscCall(PetscContainerGetPointer(container, (void **)&edata));
7676: PetscCall(MatGetNonzeroState(A, &nonzerostate));
7677: PetscCheck(nonzerostate <= edata->nonzerostate, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot handle changes to matrix nonzero structure");
7678: PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "C matrix must be the same as previously output");
7680: PetscCall(MatCreateSubMatrices(A, edata->n, edata->is, edata->is, MAT_INITIAL_MATRIX, &edata->mat));
7681: *C = edata->C;
7683: for (PetscInt i = 0; i < edata->n; i++) {
7684: Mat D;
7685: PetscScalar *dvalues;
7687: PetscCall(MatConvert(edata->mat[i], MATSEQDENSE, MAT_INITIAL_MATRIX, &D));
7688: PetscCall(MatSetOption(*C, MAT_ROW_ORIENTED, PETSC_FALSE));
7689: PetscCall(MatSeqDenseInvert(D));
7690: PetscCall(MatDenseGetArray(D, &dvalues));
7691: PetscCall(MatSetValuesIS(*C, edata->is[i], edata->is[i], dvalues, INSERT_VALUES));
7692: PetscCall(MatDestroy(&D));
7693: }
7694: PetscCall(MatDestroySubMatrices(edata->n, &edata->mat));
7695: PetscCall(MatAssemblyBegin(*C, MAT_FINAL_ASSEMBLY));
7696: PetscCall(MatAssemblyEnd(*C, MAT_FINAL_ASSEMBLY));
7697: PetscFunctionReturn(PETSC_SUCCESS);
7698: }
7700: /*@
7701: MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7703: Logically Collective
7705: Input Parameters:
7706: + mat - the matrix
7707: . nblocks - the number of blocks on this process; each block can only exist on a single process
7708: - bsizes - the block sizes
7710: Level: intermediate
7712: Notes:
7713: Currently used by `PCVPBJACOBI` for `MATAIJ` matrices
7715: Each variable point-block set of degrees of freedom must live on a single MPI process. That is, a point block cannot straddle two MPI processes.
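Example Usage:
A minimal sketch declaring three variable-sized diagonal point-blocks on this process; the values are illustrative and must sum to the local number of rows.
.vb
PetscInt bsizes[3] = {2, 3, 1}; // assumes mat has 6 local rows

PetscCall(MatSetVariableBlockSizes(mat, 3, bsizes));
// typically combined with -pc_type vpbjacobi so PCVPBJACOBI uses these blocks
.ve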
7717: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`,
7718: `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7719: @*/
7720: PetscErrorCode MatSetVariableBlockSizes(Mat mat, PetscInt nblocks, PetscInt *bsizes)
7721: {
7722: PetscInt i, ncnt = 0, nlocal;
7724: PetscFunctionBegin;
7726: PetscCheck(nblocks >= 0, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Number of local blocks must be greater than or equal to zero");
7727: PetscCall(MatGetLocalSize(mat, &nlocal, NULL));
7728: for (i = 0; i < nblocks; i++) ncnt += bsizes[i];
7729: PetscCheck(ncnt == nlocal, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT, ncnt, nlocal);
7730: PetscCall(PetscFree(mat->bsizes));
7731: mat->nblocks = nblocks;
7732: PetscCall(PetscMalloc1(nblocks, &mat->bsizes));
7733: PetscCall(PetscArraycpy(mat->bsizes, bsizes, nblocks));
7734: PetscFunctionReturn(PETSC_SUCCESS);
7735: }
7737: /*@C
7738: MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix that need not be of the same size
7740: Logically Collective; No Fortran Support
7742: Input Parameter:
7743: . mat - the matrix
7745: Output Parameters:
7746: + nblocks - the number of blocks on this process
7747: - bsizes - the block sizes
7749: Level: intermediate
7751: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7752: @*/
7753: PetscErrorCode MatGetVariableBlockSizes(Mat mat, PetscInt *nblocks, const PetscInt **bsizes)
7754: {
7755: PetscFunctionBegin;
7757: *nblocks = mat->nblocks;
7758: *bsizes = mat->bsizes;
7759: PetscFunctionReturn(PETSC_SUCCESS);
7760: }
7762: /*@
7763: MatSetBlockSizes - Sets the matrix block row and column sizes.
7765: Logically Collective
7767: Input Parameters:
7768: + mat - the matrix
7769: . rbs - row block size
7770: - cbs - column block size
7772: Level: intermediate
7774: Notes:
7775: Block row formats are `MATBAIJ` and `MATSBAIJ`. These formats ALWAYS have square block storage in the matrix.
7776: If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7777: This must be called before `MatSetUp()` or MatXXXSetPreallocation() (otherwise the block sizes default to 1), and the block sizes cannot be changed later.
7779: For `MATAIJ` matrix this function can be called at a later stage, provided that the specified block sizes
7780: are compatible with the matrix local sizes.
7782: The row and column block size determine the blocksize of the "row" and "column" vectors returned by `MatCreateVecs()`.
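Example Usage:
A minimal sketch of a rectangular matrix with different row and column block sizes; the vectors obtained from `MatCreateVecs()` inherit these block sizes.
.vb
Mat mat;
Vec right, left;

PetscCall(MatCreate(PETSC_COMM_WORLD, &mat));
PetscCall(MatSetSizes(mat, PETSC_DECIDE, PETSC_DECIDE, 12, 8));
PetscCall(MatSetBlockSizes(mat, 3, 2)); // row block size 3, column block size 2
PetscCall(MatSetType(mat, MATAIJ));
PetscCall(MatSetUp(mat));
PetscCall(MatCreateVecs(mat, &right, &left)); // right has block size 2, left has block size 3
.ve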
7784: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7785: @*/
7786: PetscErrorCode MatSetBlockSizes(Mat mat, PetscInt rbs, PetscInt cbs)
7787: {
7788: PetscFunctionBegin;
7792: PetscTryTypeMethod(mat, setblocksizes, rbs, cbs);
7793: if (mat->rmap->refcnt) {
7794: ISLocalToGlobalMapping l2g = NULL;
7795: PetscLayout nmap = NULL;
7797: PetscCall(PetscLayoutDuplicate(mat->rmap, &nmap));
7798: if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping, &l2g));
7799: PetscCall(PetscLayoutDestroy(&mat->rmap));
7800: mat->rmap = nmap;
7801: mat->rmap->mapping = l2g;
7802: }
7803: if (mat->cmap->refcnt) {
7804: ISLocalToGlobalMapping l2g = NULL;
7805: PetscLayout nmap = NULL;
7807: PetscCall(PetscLayoutDuplicate(mat->cmap, &nmap));
7808: if (mat->cmap->mapping) PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping, &l2g));
7809: PetscCall(PetscLayoutDestroy(&mat->cmap));
7810: mat->cmap = nmap;
7811: mat->cmap->mapping = l2g;
7812: }
7813: PetscCall(PetscLayoutSetBlockSize(mat->rmap, rbs));
7814: PetscCall(PetscLayoutSetBlockSize(mat->cmap, cbs));
7815: PetscFunctionReturn(PETSC_SUCCESS);
7816: }
7818: /*@
7819: MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7821: Logically Collective
7823: Input Parameters:
7824: + mat - the matrix
7825: . fromRow - matrix from which to copy row block size
7826: - fromCol - matrix from which to copy column block size (can be same as fromRow)
7828: Level: developer
7830: .seealso: [](ch_matrices), `Mat`, `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7831: @*/
7832: PetscErrorCode MatSetBlockSizesFromMats(Mat mat, Mat fromRow, Mat fromCol)
7833: {
7834: PetscFunctionBegin;
7838: if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap, fromRow->rmap->bs));
7839: if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap, fromCol->cmap->bs));
7840: PetscFunctionReturn(PETSC_SUCCESS);
7841: }
7843: /*@
7844: MatResidual - Default routine to calculate the residual r = b - Ax
7846: Collective
7848: Input Parameters:
7849: + mat - the matrix
7850: . b - the right-hand-side
7851: - x - the approximate solution
7853: Output Parameter:
7854: . r - location to store the residual
7856: Level: developer
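Example Usage:
A minimal sketch; when the matrix type provides no specialized residual this is equivalent to the two calls in the comment below.
.vb
PetscCall(MatResidual(mat, b, x, r));
// default path: MatMult(mat, x, r) followed by VecAYPX(r, -1.0, b)
.ve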
7858: .seealso: [](ch_matrices), `Mat`, `MatMult()`, `MatMultAdd()`, `PCMGSetResidual()`
7859: @*/
7860: PetscErrorCode MatResidual(Mat mat, Vec b, Vec x, Vec r)
7861: {
7862: PetscFunctionBegin;
7868: MatCheckPreallocated(mat, 1);
7869: PetscCall(PetscLogEventBegin(MAT_Residual, mat, 0, 0, 0));
7870: if (!mat->ops->residual) {
7871: PetscCall(MatMult(mat, x, r));
7872: PetscCall(VecAYPX(r, -1.0, b));
7873: } else {
7874: PetscUseTypeMethod(mat, residual, b, x, r);
7875: }
7876: PetscCall(PetscLogEventEnd(MAT_Residual, mat, 0, 0, 0));
7877: PetscFunctionReturn(PETSC_SUCCESS);
7878: }
7880: /*MC
7881: MatGetRowIJF90 - Obtains the compressed row storage i and j indices for the local rows of a sparse matrix
7883: Synopsis:
7884: MatGetRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7886: Not Collective
7888: Input Parameters:
7889: + A - the matrix
7890: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7891: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7892: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7893: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7894: always used.
7896: Output Parameters:
7897: + n - number of local rows in the (possibly compressed) matrix
7898: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7899: . ja - the column indices
7900: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7901: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7903: Level: developer
7905: Note:
7906: Use `MatRestoreRowIJF90()` when you no longer need access to the data
7908: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatRestoreRowIJF90()`
7909: M*/
7911: /*MC
7912: MatRestoreRowIJF90 - restores the compressed row storage i and j indices for the local rows of a sparse matrix obtained with `MatGetRowIJF90()`
7914: Synopsis:
7915: MatRestoreRowIJF90(Mat A, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt n, {PetscInt, pointer :: ia(:)}, {PetscInt, pointer :: ja(:)}, PetscBool done,integer ierr)
7917: Not Collective
7919: Input Parameters:
7920: + A - the matrix
7921: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7922: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7923: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7924: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7925: always used.
7926: . n - number of local rows in the (possibly compressed) matrix
7927: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7928: . ja - the column indices
7929: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7930: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7932: Level: developer
7934: .seealso: [](ch_matrices), [](sec_fortranarrays), `Mat`, `MATMPIAIJ`, `MatGetRowIJ()`, `MatRestoreRowIJ()`, `MatGetRowIJF90()`
7935: M*/
7937: /*@C
7938: MatGetRowIJ - Returns the compressed row storage i and j indices for the local rows of a sparse matrix
7940: Collective
7942: Input Parameters:
7943: + mat - the matrix
7944: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
7945: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
7946: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
7947: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
7948: always used.
7950: Output Parameters:
7951: + n - number of local rows in the (possibly compressed) matrix, use `NULL` if not needed
7952: . ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix, use `NULL` if not needed
7953: . ja - the column indices, use `NULL` if not needed
7954: - done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7955: are responsible for handling the case when done == `PETSC_FALSE` and ia and ja are not set
7957: Level: developer
7959: Notes:
7960: You CANNOT change any of the ia[] or ja[] values.
7962: Use `MatRestoreRowIJ()` when you are finished accessing the ia[] and ja[] values.
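Example Usage:
A minimal C sketch of walking the CSR structure of a sequential `MATAIJ` matrix; `A` is assumed to be assembled.
.vb
PetscInt        n, nz = 0;
const PetscInt *ia, *ja;
PetscBool       done;

PetscCall(MatGetRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
if (done) {
  for (PetscInt i = 0; i < n; i++) nz += ia[i + 1] - ia[i]; // count nonzeros row by row
}
PetscCall(MatRestoreRowIJ(A, 0, PETSC_FALSE, PETSC_FALSE, &n, &ia, &ja, &done));
.ve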
7964: Fortran Notes:
7965: Use
7966: .vb
7967: PetscInt, pointer :: ia(:),ja(:)
7968: call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
7969: ! Access the ith and jth entries via ia(i) and ja(j)
7970: .ve
7972: `MatGetRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatGetRowIJF90()`
7974: .seealso: [](ch_matrices), `Mat`, `MATAIJ`, `MatGetRowIJF90()`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
7975: @*/
7976: PetscErrorCode MatGetRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
7977: {
7978: PetscFunctionBegin;
7981: if (n) PetscAssertPointer(n, 5);
7982: if (ia) PetscAssertPointer(ia, 6);
7983: if (ja) PetscAssertPointer(ja, 7);
7984: if (done) PetscAssertPointer(done, 8);
7985: MatCheckPreallocated(mat, 1);
7986: if (!mat->ops->getrowij && done) *done = PETSC_FALSE;
7987: else {
7988: if (done) *done = PETSC_TRUE;
7989: PetscCall(PetscLogEventBegin(MAT_GetRowIJ, mat, 0, 0, 0));
7990: PetscUseTypeMethod(mat, getrowij, shift, symmetric, inodecompressed, n, ia, ja, done);
7991: PetscCall(PetscLogEventEnd(MAT_GetRowIJ, mat, 0, 0, 0));
7992: }
7993: PetscFunctionReturn(PETSC_SUCCESS);
7994: }
7996: /*@C
7997: MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
7999: Collective
8001: Input Parameters:
8002: + mat - the matrix
8003: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8004: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be
8005: symmetrized
8006: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8007: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8008: always used.
8009: . n - number of columns in the (possibly compressed) matrix
8010: . ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
8011: - ja - the row indices
8013: Output Parameter:
8014: . done - `PETSC_TRUE` or `PETSC_FALSE`, indicating whether the values have been returned
8016: Level: developer
8018: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreColumnIJ()`
8019: @*/
8020: PetscErrorCode MatGetColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8021: {
8022: PetscFunctionBegin;
8025: PetscAssertPointer(n, 5);
8026: if (ia) PetscAssertPointer(ia, 6);
8027: if (ja) PetscAssertPointer(ja, 7);
8028: PetscAssertPointer(done, 8);
8029: MatCheckPreallocated(mat, 1);
8030: if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
8031: else {
8032: *done = PETSC_TRUE;
8033: PetscUseTypeMethod(mat, getcolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8034: }
8035: PetscFunctionReturn(PETSC_SUCCESS);
8036: }
8038: /*@C
8039: MatRestoreRowIJ - Call after you are finished with the ia,ja indices obtained with `MatGetRowIJ()`.
8041: Collective
8043: Input Parameters:
8044: + mat - the matrix
8045: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8046: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8047: . inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8048: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8049: always used.
8050: . n - size of (possibly compressed) matrix
8051: . ia - the row pointers
8052: - ja - the column indices
8054: Output Parameter:
8055: . done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8057: Level: developer
8059: Note:
8060: This routine zeros out `n`, `ia`, and `ja`. This is to prevent accidental
8061: use of the array after it has been restored. If you pass `NULL`, it will
8062: not zero the pointers. Use of ia or ja after `MatRestoreRowIJ()` is invalid.
8064: Fortran Note:
8065: `MatRestoreRowIJ()` Fortran binding is deprecated (since PETSc 3.19), use `MatRestoreRowIJF90()`
8067: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatRestoreRowIJF90()`, `MatRestoreColumnIJ()`
8068: @*/
8069: PetscErrorCode MatRestoreRowIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8070: {
8071: PetscFunctionBegin;
8074: if (ia) PetscAssertPointer(ia, 6);
8075: if (ja) PetscAssertPointer(ja, 7);
8076: if (done) PetscAssertPointer(done, 8);
8077: MatCheckPreallocated(mat, 1);
8079: if (!mat->ops->restorerowij && done) *done = PETSC_FALSE;
8080: else {
8081: if (done) *done = PETSC_TRUE;
8082: PetscUseTypeMethod(mat, restorerowij, shift, symmetric, inodecompressed, n, ia, ja, done);
8083: if (n) *n = 0;
8084: if (ia) *ia = NULL;
8085: if (ja) *ja = NULL;
8086: }
8087: PetscFunctionReturn(PETSC_SUCCESS);
8088: }
8090: /*@C
8091: MatRestoreColumnIJ - Call after you are finished with the ia,ja indices obtained with `MatGetColumnIJ()`.
8093: Collective
8095: Input Parameters:
8096: + mat - the matrix
8097: . shift - 0 or 1 indicating we want the indices starting at 0 or 1
8098: . symmetric - `PETSC_TRUE` or `PETSC_FALSE` indicating the matrix data structure should be symmetrized
8099: - inodecompressed - `PETSC_TRUE` or `PETSC_FALSE` indicating if the nonzero structure of the
8100: inodes or the nonzero elements is wanted. For `MATBAIJ` matrices the compressed version is
8101: always used.
8103: Output Parameters:
8104: + n - size of (possibly compressed) matrix
8105: . ia - the column pointers
8106: . ja - the row indices
8107: - done - `PETSC_TRUE` or `PETSC_FALSE` indicating whether the values have been returned
8109: Level: developer
8111: .seealso: [](ch_matrices), `Mat`, `MatGetColumnIJ()`, `MatRestoreRowIJ()`
8112: @*/
8113: PetscErrorCode MatRestoreColumnIJ(Mat mat, PetscInt shift, PetscBool symmetric, PetscBool inodecompressed, PetscInt *n, const PetscInt *ia[], const PetscInt *ja[], PetscBool *done)
8114: {
8115: PetscFunctionBegin;
8118: if (ia) PetscAssertPointer(ia, 6);
8119: if (ja) PetscAssertPointer(ja, 7);
8120: PetscAssertPointer(done, 8);
8121: MatCheckPreallocated(mat, 1);
8123: if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
8124: else {
8125: *done = PETSC_TRUE;
8126: PetscUseTypeMethod(mat, restorecolumnij, shift, symmetric, inodecompressed, n, ia, ja, done);
8127: if (n) *n = 0;
8128: if (ia) *ia = NULL;
8129: if (ja) *ja = NULL;
8130: }
8131: PetscFunctionReturn(PETSC_SUCCESS);
8132: }
8134: /*@C
8135: MatColoringPatch - Used inside matrix coloring routines that use `MatGetRowIJ()` and/or
8136: `MatGetColumnIJ()`.
8138: Collective
8140: Input Parameters:
8141: + mat - the matrix
8142: . ncolors - maximum color value
8143: . n - number of entries in colorarray
8144: - colorarray - array indicating color for each column
8146: Output Parameter:
8147: . iscoloring - coloring generated using colorarray information
8149: Level: developer
8151: .seealso: [](ch_matrices), `Mat`, `MatGetRowIJ()`, `MatGetColumnIJ()`
8152: @*/
8153: PetscErrorCode MatColoringPatch(Mat mat, PetscInt ncolors, PetscInt n, ISColoringValue colorarray[], ISColoring *iscoloring)
8154: {
8155: PetscFunctionBegin;
8158: PetscAssertPointer(colorarray, 4);
8159: PetscAssertPointer(iscoloring, 5);
8160: MatCheckPreallocated(mat, 1);
8162: if (!mat->ops->coloringpatch) {
8163: PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat), ncolors, n, colorarray, PETSC_OWN_POINTER, iscoloring));
8164: } else {
8165: PetscUseTypeMethod(mat, coloringpatch, ncolors, n, colorarray, iscoloring);
8166: }
8167: PetscFunctionReturn(PETSC_SUCCESS);
8168: }
8170: /*@
8171: MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
8173: Logically Collective
8175: Input Parameter:
8176: . mat - the factored matrix to be reset
8178: Level: developer
8180: Notes:
8181: This routine should be used only with factored matrices formed by in-place
8182: factorization via ILU(0) (or by in-place LU factorization for the `MATSEQDENSE`
8183: format). This option can save memory, for example, when solving nonlinear
8184: systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
8185: ILU(0) preconditioner.
8187: One can specify in-place ILU(0) factorization by calling
8188: .vb
8189: PCSetType(pc,PCILU);
8190: PCFactorSetUseInPlace(pc,PETSC_TRUE);
8191: .ve
8192: or by using the options -pc_type ilu -pc_factor_in_place
8194: In-place factorization ILU(0) can also be used as a local
8195: solver for the blocks within the block Jacobi or additive Schwarz
8196: methods (runtime option: -sub_pc_factor_in_place). See Users-Manual: ch_pc
8197: for details on setting local solver options.
8199: Most users should employ the `KSP` interface for linear solvers
8200: instead of working directly with matrix algebra routines such as this.
8201: See, e.g., `KSPCreate()`.
8203: .seealso: [](ch_matrices), `Mat`, `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
8204: @*/
8205: PetscErrorCode MatSetUnfactored(Mat mat)
8206: {
8207: PetscFunctionBegin;
8210: MatCheckPreallocated(mat, 1);
8211: mat->factortype = MAT_FACTOR_NONE;
8212: if (!mat->ops->setunfactored) PetscFunctionReturn(PETSC_SUCCESS);
8213: PetscUseTypeMethod(mat, setunfactored);
8214: PetscFunctionReturn(PETSC_SUCCESS);
8215: }
8217: /*MC
8218: MatDenseGetArrayF90 - Accesses a matrix array from Fortran
8220: Synopsis:
8221: MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8223: Not Collective
8225: Input Parameter:
8226: . x - matrix
8228: Output Parameters:
8229: + xx_v - the Fortran pointer to the array
8230: - ierr - error code
8232: Example of Usage:
8233: .vb
8234: PetscScalar, pointer :: xx_v(:,:)
8235: ....
8236: call MatDenseGetArrayF90(x,xx_v,ierr)
8237: a = xx_v(3,1)
8238: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8239: .ve
8241: Level: advanced
8243: .seealso: [](ch_matrices), `Mat`, `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8244: M*/
8246: /*MC
8247: MatDenseRestoreArrayF90 - Restores a matrix array that has been
8248: accessed with `MatDenseGetArrayF90()`.
8250: Synopsis:
8251: MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8253: Not Collective
8255: Input Parameters:
8256: + x - matrix
8257: - xx_v - the Fortran90 pointer to the array
8259: Output Parameter:
8260: . ierr - error code
8262: Example of Usage:
8263: .vb
8264: PetscScalar, pointer :: xx_v(:,:)
8265: ....
8266: call MatDenseGetArrayF90(x,xx_v,ierr)
8267: a = xx_v(3,1)
8268: call MatDenseRestoreArrayF90(x,xx_v,ierr)
8269: .ve
8271: Level: advanced
8273: .seealso: [](ch_matrices), `Mat`, `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8274: M*/
8276: /*MC
8277: MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran.
8279: Synopsis:
8280: MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8282: Not Collective
8284: Input Parameter:
8285: . x - matrix
8287: Output Parameters:
8288: + xx_v - the Fortran pointer to the array
8289: - ierr - error code
8291: Example of Usage:
8292: .vb
8293: PetscScalar, pointer :: xx_v(:)
8294: ....
8295: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8296: a = xx_v(3)
8297: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8298: .ve
8300: Level: advanced
8302: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8303: M*/
8305: /*MC
8306: MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8307: accessed with `MatSeqAIJGetArrayF90()`.
8309: Synopsis:
8310: MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8312: Not Collective
8314: Input Parameters:
8315: + x - matrix
8316: - xx_v - the Fortran90 pointer to the array
8318: Output Parameter:
8319: . ierr - error code
8321: Example of Usage:
8322: .vb
8323: PetscScalar, pointer :: xx_v(:)
8324: ....
8325: call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8326: a = xx_v(3)
8327: call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8328: .ve
8330: Level: advanced
8332: .seealso: [](ch_matrices), `Mat`, `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8333: M*/
8335: /*@
8336: MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8337: as the original matrix.
8339: Collective
8341: Input Parameters:
8342: + mat - the original matrix
8343: . isrow - parallel `IS` containing the rows this processor should obtain
8344: . iscol - parallel `IS` containing all columns you wish to keep. Each process should list the columns that will be in ITS "diagonal part" in the new matrix.
8345: - cll - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
8347: Output Parameter:
8348: . newmat - the new submatrix, of the same type as the original matrix
8350: Level: advanced
8352: Notes:
8353: The submatrix can be multiplied with vectors that use the same layout as `iscol`.
8355: Some matrix types place restrictions on the row and column indices, such
8356: as that they be sorted or that they be equal to each other. For `MATBAIJ` and `MATSBAIJ` matrices the indices must include all rows/columns of a block;
8357: for example, if the block size is 3 one cannot select the 0 and 2 rows without selecting the 1 row.
8359: The index sets may not have duplicate entries.
8361: The first time this is called you should use a cll of `MAT_INITIAL_MATRIX`,
8362: the `MatCreateSubMatrix()` routine will create the newmat for you. Any additional calls
8363: to this routine with a mat of the same nonzero structure and with a call of `MAT_REUSE_MATRIX`
8364: will reuse the matrix generated the first time. You should call `MatDestroy()` on `newmat` when
8365: you are finished using it.
8367: The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8368: the input matrix.
8370: If `iscol` is `NULL` then all columns are obtained (not supported in Fortran).
8372: If `isrow` and `iscol` have a nontrivial block-size then the resulting matrix has this block-size as well. This feature
8373: is used by `PCFIELDSPLIT` to allow easy nesting of its use.
8375: Example usage:
8376: Consider the following 8x8 matrix with 34 non-zero values, that is
8377: assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8378: proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8379: as follows
8380: .vb
8381: 1 2 0 | 0 3 0 | 0 4
8382: Proc0 0 5 6 | 7 0 0 | 8 0
8383: 9 0 10 | 11 0 0 | 12 0
8384: -------------------------------------
8385: 13 0 14 | 15 16 17 | 0 0
8386: Proc1 0 18 0 | 19 20 21 | 0 0
8387: 0 0 0 | 22 23 0 | 24 0
8388: -------------------------------------
8389: Proc2 25 26 27 | 0 0 28 | 29 0
8390: 30 0 0 | 31 32 33 | 0 34
8391: .ve
8393: Suppose `isrow` = [0 1 | 4 | 6 7] and `iscol` = [1 2 | 3 4 5 | 6]. The resulting submatrix is
8395: .vb
8396: 2 0 | 0 3 0 | 0
8397: Proc0 5 6 | 7 0 0 | 8
8398: -------------------------------
8399: Proc1 18 0 | 19 20 21 | 0
8400: -------------------------------
8401: Proc2 26 27 | 0 0 28 | 29
8402: 0 0 | 31 32 33 | 0
8403: .ve
8405: .seealso: [](ch_matrices), `Mat`, `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8406: @*/
8407: PetscErrorCode MatCreateSubMatrix(Mat mat, IS isrow, IS iscol, MatReuse cll, Mat *newmat)
8408: {
8409: PetscMPIInt size;
8410: Mat *local;
8411: IS iscoltmp;
8412: PetscBool flg;
8414: PetscFunctionBegin;
8418: PetscAssertPointer(newmat, 5);
8421: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
8422: PetscCheck(cll != MAT_IGNORE_MATRIX, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Cannot use MAT_IGNORE_MATRIX");
8424: MatCheckPreallocated(mat, 1);
8425: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
8427: if (!iscol || isrow == iscol) {
8428: PetscBool stride;
8429: PetscMPIInt grabentirematrix = 0, grab;
8430: PetscCall(PetscObjectTypeCompare((PetscObject)isrow, ISSTRIDE, &stride));
8431: if (stride) {
8432: PetscInt first, step, n, rstart, rend;
8433: PetscCall(ISStrideGetInfo(isrow, &first, &step));
8434: if (step == 1) {
8435: PetscCall(MatGetOwnershipRange(mat, &rstart, &rend));
8436: if (rstart == first) {
8437: PetscCall(ISGetLocalSize(isrow, &n));
8438: if (n == rend - rstart) grabentirematrix = 1;
8439: }
8440: }
8441: }
8442: PetscCall(MPIU_Allreduce(&grabentirematrix, &grab, 1, MPI_INT, MPI_MIN, PetscObjectComm((PetscObject)mat)));
8443: if (grab) {
8444: PetscCall(PetscInfo(mat, "Getting entire matrix as submatrix\n"));
8445: if (cll == MAT_INITIAL_MATRIX) {
8446: *newmat = mat;
8447: PetscCall(PetscObjectReference((PetscObject)mat));
8448: }
8449: PetscFunctionReturn(PETSC_SUCCESS);
8450: }
8451: }
8453: if (!iscol) {
8454: PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat), mat->cmap->n, mat->cmap->rstart, 1, &iscoltmp));
8455: } else {
8456: iscoltmp = iscol;
8457: }
8459: /* if original matrix is on just one processor then use submatrix generated */
8460: if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
8461: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_REUSE_MATRIX, &newmat));
8462: goto setproperties;
8463: } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
8464: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscoltmp, MAT_INITIAL_MATRIX, &local));
8465: *newmat = *local;
8466: PetscCall(PetscFree(local));
8467: goto setproperties;
8468: } else if (!mat->ops->createsubmatrix) {
8469: /* Create a new matrix type that implements the operation using the full matrix */
8470: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8471: switch (cll) {
8472: case MAT_INITIAL_MATRIX:
8473: PetscCall(MatCreateSubMatrixVirtual(mat, isrow, iscoltmp, newmat));
8474: break;
8475: case MAT_REUSE_MATRIX:
8476: PetscCall(MatSubMatrixVirtualUpdate(*newmat, mat, isrow, iscoltmp));
8477: break;
8478: default:
8479: SETERRQ(PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
8480: }
8481: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8482: goto setproperties;
8483: }
8485: PetscCall(PetscLogEventBegin(MAT_CreateSubMat, mat, 0, 0, 0));
8486: PetscUseTypeMethod(mat, createsubmatrix, isrow, iscoltmp, cll, newmat);
8487: PetscCall(PetscLogEventEnd(MAT_CreateSubMat, mat, 0, 0, 0));
8489: setproperties:
8490: PetscCall(ISEqualUnsorted(isrow, iscoltmp, &flg));
8491: if (flg) PetscCall(MatPropagateSymmetryOptions(mat, *newmat));
8492: if (!iscol) PetscCall(ISDestroy(&iscoltmp));
8493: if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
8494: PetscFunctionReturn(PETSC_SUCCESS);
8495: }
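/*
   Example usage (a minimal sketch, not taken from the PETSc tutorials): assumes a hypothetical,
   already assembled parallel AIJ matrix A that is square with matching row and column layouts;
   every other locally owned row and column is kept, the submatrix is created once and then
   recomputed in place after the values of A change.
.vb
  IS       isrow, iscol;
  Mat      sub = NULL;
  PetscInt rstart, rend;

  PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), (rend - rstart) / 2, rstart, 2, &isrow));
  PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), (rend - rstart) / 2, rstart, 2, &iscol));

  PetscCall(MatCreateSubMatrix(A, isrow, iscol, MAT_INITIAL_MATRIX, &sub));
  // ... change values of A, keeping the same nonzero structure ...
  PetscCall(MatCreateSubMatrix(A, isrow, iscol, MAT_REUSE_MATRIX, &sub));

  PetscCall(MatDestroy(&sub));
  PetscCall(ISDestroy(&isrow));
  PetscCall(ISDestroy(&iscol));
.ve
*/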
8497: /*@
8498: MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8500: Not Collective
8502: Input Parameters:
8503: + A - the matrix we wish to propagate options from
8504: - B - the matrix we wish to propagate options to
8506: Level: beginner
8508: Note:
8509:   Propagates the options associated with `MAT_SYMMETRY_ETERNAL`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_HERMITIAN`, `MAT_SPD`, `MAT_SYMMETRIC`, and `MAT_STRUCTURAL_SYMMETRY_ETERNAL`
8511: .seealso: [](ch_matrices), `Mat`, `MatSetOption()`, `MatIsSymmetricKnown()`, `MatIsSPDKnown()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetricKnown()`
8512: @*/
8513: PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8514: {
8515: PetscFunctionBegin;
8518: B->symmetry_eternal = A->symmetry_eternal;
8519: B->structural_symmetry_eternal = A->structural_symmetry_eternal;
8520: B->symmetric = A->symmetric;
8521: B->structurally_symmetric = A->structurally_symmetric;
8522: B->spd = A->spd;
8523: B->hermitian = A->hermitian;
8524: PetscFunctionReturn(PETSC_SUCCESS);
8525: }
8527: /*@
8528:   MatStashSetInitialSize - sets the sizes of the matrix stash, which is
8529: used during the assembly process to store values that belong to
8530: other processors.
8532: Not Collective
8534: Input Parameters:
8535: + mat - the matrix
8536: . size - the initial size of the stash.
8537: - bsize - the initial size of the block-stash (if used).
8539: Options Database Keys:
8540: + -matstash_initial_size <size> or <size0,size1,...sizep-1> - set initial size
8541: - -matstash_block_initial_size <bsize> or <bsize0,bsize1,...bsizep-1> - set initial block size
8543: Level: intermediate
8545: Notes:
8546: The block-stash is used for values set with `MatSetValuesBlocked()` while
8547: the stash is used for values set with `MatSetValues()`
8549:   Run with the option -info and look for output of the form
8550:     MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs
8551:   to determine the appropriate value, MM, to use for `size`, and output of the form
8552:     MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs
8553:   to determine the value, BMM, to use for `bsize`.
8555: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8556: @*/
8557: PetscErrorCode MatStashSetInitialSize(Mat mat, PetscInt size, PetscInt bsize)
8558: {
8559: PetscFunctionBegin;
8562: PetscCall(MatStashSetInitialSize_Private(&mat->stash, size));
8563: PetscCall(MatStashSetInitialSize_Private(&mat->bstash, bsize));
8564: PetscFunctionReturn(PETSC_SUCCESS);
8565: }
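/*
   Example usage (a minimal sketch): assumes a hypothetical parallel matrix A for which a previous
   run with -info reported roughly 1000 stashed (off-process) entries; the stash is pre-sized before
   the values are inserted so that assembly does not need repeated mallocs.
.vb
  PetscCall(MatStashSetInitialSize(A, 1000, 100));
  // ... MatSetValues() calls that include rows owned by other processes ...
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
.ve
*/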
8567: /*@
8568:   MatInterpolateAdd - $w = y + A*x$ or $w = y + A^T*x$ depending on the shape of
8569:   the matrix
8571: Neighbor-wise Collective
8573: Input Parameters:
8574: + A - the matrix
8575: . x - the vector to be multiplied by the interpolation operator
8576: - y - the vector to be added to the result
8578: Output Parameter:
8579: . w - the resulting vector
8581: Level: intermediate
8583: Notes:
8584: `w` may be the same vector as `y`.
8586: This allows one to use either the restriction or interpolation (its transpose)
8587: matrix to do the interpolation
8589: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8590: @*/
8591: PetscErrorCode MatInterpolateAdd(Mat A, Vec x, Vec y, Vec w)
8592: {
8593: PetscInt M, N, Ny;
8595: PetscFunctionBegin;
8600: PetscCall(MatGetSize(A, &M, &N));
8601: PetscCall(VecGetSize(y, &Ny));
8602: if (M == Ny) {
8603: PetscCall(MatMultAdd(A, x, y, w));
8604: } else {
8605: PetscCall(MatMultTransposeAdd(A, x, y, w));
8606: }
8607: PetscFunctionReturn(PETSC_SUCCESS);
8608: }
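/*
   Example usage (a minimal sketch): assumes a hypothetical interpolation matrix P with fine rows and
   coarse columns, a coarse vector xc, and fine vectors yf and wf with layouts matching the rows of P;
   the same call also works if P is stored as the (transposed) restriction operator.
.vb
  PetscCall(MatInterpolateAdd(P, xc, yf, wf));   // wf = yf + P*xc, or yf + P^T*xc if the shapes dictate
.ve
*/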
8610: /*@
8611:   MatInterpolate - $y = A*x$ or $y = A^T*x$ depending on the shape of
8612:   the matrix
8614: Neighbor-wise Collective
8616: Input Parameters:
8617: + A - the matrix
8618: - x - the vector to be interpolated
8620: Output Parameter:
8621: . y - the resulting vector
8623: Level: intermediate
8625: Note:
8626: This allows one to use either the restriction or interpolation (its transpose)
8627: matrix to do the interpolation
8629: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`, `PCMG`
8630: @*/
8631: PetscErrorCode MatInterpolate(Mat A, Vec x, Vec y)
8632: {
8633: PetscInt M, N, Ny;
8635: PetscFunctionBegin;
8639: PetscCall(MatGetSize(A, &M, &N));
8640: PetscCall(VecGetSize(y, &Ny));
8641: if (M == Ny) {
8642: PetscCall(MatMult(A, x, y));
8643: } else {
8644: PetscCall(MatMultTranspose(A, x, y));
8645: }
8646: PetscFunctionReturn(PETSC_SUCCESS);
8647: }
8649: /*@
8650:   MatRestrict - $y = A*x$ or $y = A^T*x$ depending on the shape of the matrix
8652: Neighbor-wise Collective
8654: Input Parameters:
8655: + A - the matrix
8656: - x - the vector to be restricted
8658: Output Parameter:
8659: . y - the resulting vector
8661: Level: intermediate
8663: Note:
8664: This allows one to use either the restriction or interpolation (its transpose)
8665: matrix to do the restriction
8667: .seealso: [](ch_matrices), `Mat`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`, `PCMG`
8668: @*/
8669: PetscErrorCode MatRestrict(Mat A, Vec x, Vec y)
8670: {
8671: PetscInt M, N, Ny;
8673: PetscFunctionBegin;
8677: PetscCall(MatGetSize(A, &M, &N));
8678: PetscCall(VecGetSize(y, &Ny));
8679: if (M == Ny) {
8680: PetscCall(MatMult(A, x, y));
8681: } else {
8682: PetscCall(MatMultTranspose(A, x, y));
8683: }
8684: PetscFunctionReturn(PETSC_SUCCESS);
8685: }
8687: /*@
8688: MatMatInterpolateAdd - $Y = W + A*X$ or $W + A^T*X$ depending on the shape of `A`
8690: Neighbor-wise Collective
8692: Input Parameters:
8693: + A - the matrix
8694: . x - the input dense matrix to be multiplied
8695: - w - the input dense matrix to be added to the result
8697: Output Parameter:
8698: . y - the output dense matrix
8700: Level: intermediate
8702: Note:
8703: This allows one to use either the restriction or interpolation (its transpose)
8704: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8705: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8707: .seealso: [](ch_matrices), `Mat`, `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`, `PCMG`
8708: @*/
8709: PetscErrorCode MatMatInterpolateAdd(Mat A, Mat x, Mat w, Mat *y)
8710: {
8711: PetscInt M, N, Mx, Nx, Mo, My = 0, Ny = 0;
8712: PetscBool trans = PETSC_TRUE;
8713: MatReuse reuse = MAT_INITIAL_MATRIX;
8715: PetscFunctionBegin;
8721: PetscCall(MatGetSize(A, &M, &N));
8722: PetscCall(MatGetSize(x, &Mx, &Nx));
8723: if (N == Mx) trans = PETSC_FALSE;
8724: else PetscCheck(M == Mx, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx);
8725: Mo = trans ? N : M;
8726: if (*y) {
8727: PetscCall(MatGetSize(*y, &My, &Ny));
8728: if (Mo == My && Nx == Ny) {
8729: reuse = MAT_REUSE_MATRIX;
8730: } else {
8731: PetscCheck(w || *y != w, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT, M, N, Mx, Nx, My, Ny);
8732: PetscCall(MatDestroy(y));
8733: }
8734: }
8736: if (w && *y == w) { /* this is to minimize changes in PCMG */
8737: PetscBool flg;
8739: PetscCall(PetscObjectQuery((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject *)&w));
8740: if (w) {
8741: PetscInt My, Ny, Mw, Nw;
8743: PetscCall(PetscObjectTypeCompare((PetscObject)*y, ((PetscObject)w)->type_name, &flg));
8744: PetscCall(MatGetSize(*y, &My, &Ny));
8745: PetscCall(MatGetSize(w, &Mw, &Nw));
8746: if (!flg || My != Mw || Ny != Nw) w = NULL;
8747: }
8748: if (!w) {
8749: PetscCall(MatDuplicate(*y, MAT_COPY_VALUES, &w));
8750: PetscCall(PetscObjectCompose((PetscObject)*y, "__MatMatIntAdd_w", (PetscObject)w));
8751: PetscCall(PetscObjectDereference((PetscObject)w));
8752: } else {
8753: PetscCall(MatCopy(*y, w, UNKNOWN_NONZERO_PATTERN));
8754: }
8755: }
8756: if (!trans) {
8757: PetscCall(MatMatMult(A, x, reuse, PETSC_DEFAULT, y));
8758: } else {
8759: PetscCall(MatTransposeMatMult(A, x, reuse, PETSC_DEFAULT, y));
8760: }
8761: if (w) PetscCall(MatAXPY(*y, 1.0, w, UNKNOWN_NONZERO_PATTERN));
8762: PetscFunctionReturn(PETSC_SUCCESS);
8763: }
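/*
   Example usage (a minimal sketch): assumes a hypothetical interpolation matrix P and dense matrices
   Xc and Wf of compatible sizes; Y must start as NULL so the first call can create it, and later calls
   with unchanged sizes reuse it.
.vb
  Mat Y = NULL;

  PetscCall(MatMatInterpolateAdd(P, Xc, Wf, &Y));   // creates Y
  // ... update the values of Xc and/or Wf ...
  PetscCall(MatMatInterpolateAdd(P, Xc, Wf, &Y));   // reuses Y
  PetscCall(MatDestroy(&Y));
.ve
*/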
8765: /*@
8766: MatMatInterpolate - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8768: Neighbor-wise Collective
8770: Input Parameters:
8771: + A - the matrix
8772: - x - the input dense matrix
8774: Output Parameter:
8775: . y - the output dense matrix
8777: Level: intermediate
8779: Note:
8780: This allows one to use either the restriction or interpolation (its transpose)
8781: matrix to do the interpolation. `y` matrix can be reused if already created with the proper sizes,
8782: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8784: .seealso: [](ch_matrices), `Mat`, `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`, `PCMG`
8785: @*/
8786: PetscErrorCode MatMatInterpolate(Mat A, Mat x, Mat *y)
8787: {
8788: PetscFunctionBegin;
8789: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8790: PetscFunctionReturn(PETSC_SUCCESS);
8791: }
8793: /*@
8794: MatMatRestrict - $Y = A*X$ or $A^T*X$ depending on the shape of `A`
8796: Neighbor-wise Collective
8798: Input Parameters:
8799: + A - the matrix
8800: - x - the input dense matrix
8802: Output Parameter:
8803: . y - the output dense matrix
8805: Level: intermediate
8807: Note:
8808: This allows one to use either the restriction or interpolation (its transpose)
8809: matrix to do the restriction. `y` matrix can be reused if already created with the proper sizes,
8810: otherwise it will be recreated. `y` must be initialized to `NULL` if not supplied.
8812: .seealso: [](ch_matrices), `Mat`, `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`, `PCMG`
8813: @*/
8814: PetscErrorCode MatMatRestrict(Mat A, Mat x, Mat *y)
8815: {
8816: PetscFunctionBegin;
8817: PetscCall(MatMatInterpolateAdd(A, x, NULL, y));
8818: PetscFunctionReturn(PETSC_SUCCESS);
8819: }
8821: /*@
8822: MatGetNullSpace - retrieves the null space of a matrix.
8824: Logically Collective
8826:   Input Parameter:
8827: . mat - the matrix

  Output Parameter:
. nullsp - the null space object, `NULL` if no null space has been set
8830: Level: developer
8832: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`, `MatNullSpace`
8833: @*/
8834: PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8835: {
8836: PetscFunctionBegin;
8838: PetscAssertPointer(nullsp, 2);
8839: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8840: PetscFunctionReturn(PETSC_SUCCESS);
8841: }
8843: /*@
8844: MatSetNullSpace - attaches a null space to a matrix.
8846: Logically Collective
8848: Input Parameters:
8849: + mat - the matrix
8850: - nullsp - the null space object
8852: Level: advanced
8854: Notes:
8855: This null space is used by the `KSP` linear solvers to solve singular systems.
8857:   Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with a `nullsp` of `NULL`.
8859:   For inconsistent singular systems (linear systems where the right-hand side is not in the range of the operator) the `KSP` residuals will not converge
8860:   to zero but the linear system will still be solved in a least squares sense.
8862:   The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8863:   for a matrix $A$ mapping $R^n$ to $R^m$ ($m$ rows, $n$ columns), $R^n$ is the direct sum of the null space of $A$, $n(A)$, and the range of $A^T$, $R(A^T)$.
8864:   Similarly, $R^m$ is the direct sum of $n(A^T)$ and $R(A)$. Hence the linear system $A x = b$ has a solution only if $b \in R(A)$ (equivalently, $b$ is orthogonal to
8865:   $n(A^T)$), and if $x$ is a solution then $x + \alpha n(A)$ is a solution for any $\alpha$. The minimum norm solution is orthogonal to $n(A)$. For problems without a solution,
8866:   the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving $A x = \hat{b}$, where $\hat{b}$ is $b$ orthogonalized against $n(A^T)$.
8867:   This $\hat{b}$ can be obtained by calling `MatNullSpaceRemove()` with the null space of the transpose of the matrix.
8869:   If the matrix is known to be symmetric, because it is a `MATSBAIJ` matrix or because one has called
8870:   `MatSetOption`(mat,`MAT_SYMMETRIC` or possibly `MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`), this
8871: routine also automatically calls `MatSetTransposeNullSpace()`.
8873: The user should call `MatNullSpaceDestroy()`.
8875: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
8876: `KSPSetPCSide()`
8877: @*/
8878: PetscErrorCode MatSetNullSpace(Mat mat, MatNullSpace nullsp)
8879: {
8880: PetscFunctionBegin;
8883: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8884: PetscCall(MatNullSpaceDestroy(&mat->nullsp));
8885: mat->nullsp = nullsp;
8886: if (mat->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetTransposeNullSpace(mat, nullsp));
8887: PetscFunctionReturn(PETSC_SUCCESS);
8888: }
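/*
   Example usage (a minimal sketch): assumes a hypothetical singular matrix A, such as a pure Neumann
   Laplacian, whose null space is the constant vector; attaching the null space lets the `KSP` solvers
   handle the singular system.
.vb
  MatNullSpace nullsp;

  PetscCall(MatNullSpaceCreate(PetscObjectComm((PetscObject)A), PETSC_TRUE, 0, NULL, &nullsp));
  PetscCall(MatSetNullSpace(A, nullsp));
  PetscCall(MatNullSpaceDestroy(&nullsp));
.ve
*/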
8890: /*@
8891: MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
8893: Logically Collective
8895:   Input Parameter:
8896: . mat - the matrix

  Output Parameter:
. nullsp - the null space object of the transpose, `NULL` if it has not been set
8899: Level: developer
8901: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
8902: @*/
8903: PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
8904: {
8905: PetscFunctionBegin;
8908: PetscAssertPointer(nullsp, 2);
8909: *nullsp = (mat->symmetric == PETSC_BOOL3_TRUE && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
8910: PetscFunctionReturn(PETSC_SUCCESS);
8911: }
8913: /*@
8914: MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
8916: Logically Collective
8918: Input Parameters:
8919: + mat - the matrix
8920: - nullsp - the null space object
8922: Level: advanced
8924: Notes:
8925: This allows solving singular linear systems defined by the transpose of the matrix using `KSP` solvers with left preconditioning.
8927: See `MatSetNullSpace()`
8929: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
8930: @*/
8931: PetscErrorCode MatSetTransposeNullSpace(Mat mat, MatNullSpace nullsp)
8932: {
8933: PetscFunctionBegin;
8936: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8937: PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
8938: mat->transnullsp = nullsp;
8939: PetscFunctionReturn(PETSC_SUCCESS);
8940: }
8942: /*@
8943:   MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions.
8944: This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
8946: Logically Collective
8948: Input Parameters:
8949: + mat - the matrix
8950: - nullsp - the null space object
8952: Level: advanced
8954: Notes:
8955: Overwrites any previous near null space that may have been attached
8957: You can remove the null space by calling this routine with an `nullsp` of `NULL`
8959: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
8960: @*/
8961: PetscErrorCode MatSetNearNullSpace(Mat mat, MatNullSpace nullsp)
8962: {
8963: PetscFunctionBegin;
8967: MatCheckPreallocated(mat, 1);
8968: if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8969: PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
8970: mat->nearnullsp = nullsp;
8971: PetscFunctionReturn(PETSC_SUCCESS);
8972: }
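/*
   Example usage (a minimal sketch): assumes a hypothetical elasticity operator A and a vector coords
   holding the nodal coordinates, blocked by the spatial dimension; the rigid body modes are attached
   as the near null space for use by multigrid preconditioners such as `PCGAMG`.
.vb
  MatNullSpace nearnull;

  PetscCall(MatNullSpaceCreateRigidBody(coords, &nearnull));
  PetscCall(MatSetNearNullSpace(A, nearnull));
  PetscCall(MatNullSpaceDestroy(&nearnull));
.ve
*/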
8974: /*@
8975: MatGetNearNullSpace - Get null space attached with `MatSetNearNullSpace()`
8977: Not Collective
8979: Input Parameter:
8980: . mat - the matrix
8982: Output Parameter:
8983: . nullsp - the null space object, `NULL` if not set
8985: Level: advanced
8987: .seealso: [](ch_matrices), `Mat`, `MatNullSpace`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
8988: @*/
8989: PetscErrorCode MatGetNearNullSpace(Mat mat, MatNullSpace *nullsp)
8990: {
8991: PetscFunctionBegin;
8994: PetscAssertPointer(nullsp, 2);
8995: MatCheckPreallocated(mat, 1);
8996: *nullsp = mat->nearnullsp;
8997: PetscFunctionReturn(PETSC_SUCCESS);
8998: }
9000: /*@C
9001: MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
9003: Collective
9005: Input Parameters:
9006: + mat - the matrix
9007: . row - row/column permutation
9008: - info - information on desired factorization process
9010: Level: developer
9012: Notes:
9013:   This is probably truly in-place only when the level of fill is zero; otherwise it allocates
9014:   new space to store the factored matrix and frees the previous memory.
9016: Most users should employ the `KSP` interface for linear solvers
9017: instead of working directly with matrix algebra routines such as this.
9018: See, e.g., `KSPCreate()`.
9020: Developer Note:
9021: The Fortran interface is not autogenerated as the
9022: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9024: .seealso: [](ch_matrices), `Mat`, `MatFactorInfo`, `MatGetFactor()`, `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
9025: @*/
9026: PetscErrorCode MatICCFactor(Mat mat, IS row, const MatFactorInfo *info)
9027: {
9028: PetscFunctionBegin;
9032: PetscAssertPointer(info, 3);
9033: PetscCheck(mat->rmap->N == mat->cmap->N, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONG, "matrix must be square");
9034: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
9035: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
9036: MatCheckPreallocated(mat, 1);
9037: PetscUseTypeMethod(mat, iccfactor, row, info);
9038: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9039: PetscFunctionReturn(PETSC_SUCCESS);
9040: }
9042: /*@
9043: MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
9044: ghosted ones.
9046: Not Collective
9048: Input Parameters:
9049: + mat - the matrix
9050: - diag - the diagonal values, including ghost ones
9052: Level: developer
9054: Notes:
9055: Works only for `MATMPIAIJ` and `MATMPIBAIJ` matrices
9057:   This allows one to avoid the communication that `MatDiagonalScale()` would otherwise require to obtain the ghosted scaling values
9059: .seealso: [](ch_matrices), `Mat`, `MatDiagonalScale()`
9060: @*/
9061: PetscErrorCode MatDiagonalScaleLocal(Mat mat, Vec diag)
9062: {
9063: PetscMPIInt size;
9065: PetscFunctionBegin;
9070: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must be already assembled");
9071: PetscCall(PetscLogEventBegin(MAT_Scale, mat, 0, 0, 0));
9072: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
9073: if (size == 1) {
9074: PetscInt n, m;
9075: PetscCall(VecGetSize(diag, &n));
9076: PetscCall(MatGetSize(mat, NULL, &m));
9077: PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only supported for sequential matrices when no ghost points/periodic conditions");
9078: PetscCall(MatDiagonalScale(mat, NULL, diag));
9079: } else {
9080: PetscUseMethod(mat, "MatDiagonalScaleLocal_C", (Mat, Vec), (mat, diag));
9081: }
9082: PetscCall(PetscLogEventEnd(MAT_Scale, mat, 0, 0, 0));
9083: PetscCall(PetscObjectStateIncrease((PetscObject)mat));
9084: PetscFunctionReturn(PETSC_SUCCESS);
9085: }
9087: /*@
9088: MatGetInertia - Gets the inertia from a factored matrix
9090: Collective
9092: Input Parameter:
9093: . mat - the matrix
9095: Output Parameters:
9096: + nneg - number of negative eigenvalues
9097: . nzero - number of zero eigenvalues
9098: - npos - number of positive eigenvalues
9100: Level: advanced
9102: Note:
9103: Matrix must have been factored by `MatCholeskyFactor()`
9105: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatCholeskyFactor()`
9106: @*/
9107: PetscErrorCode MatGetInertia(Mat mat, PetscInt *nneg, PetscInt *nzero, PetscInt *npos)
9108: {
9109: PetscFunctionBegin;
9112: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9113: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Numeric factor mat is not assembled");
9114: PetscUseTypeMethod(mat, getinertia, nneg, nzero, npos);
9115: PetscFunctionReturn(PETSC_SUCCESS);
9116: }
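/*
   Example usage (a minimal sketch): assumes a hypothetical symmetric, possibly indefinite, sequential
   SBAIJ matrix A whose factorization supports inertia computation; A is factored with PETSc's own
   Cholesky (LDL^T) and the inertia is then read from the factor. Solver packages such as MUMPS can be
   used in the same way.
.vb
  Mat           F;
  MatFactorInfo info;
  IS            rperm, cperm;
  PetscInt      nneg, nzero, npos;

  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_CHOLESKY, &F));
  PetscCall(MatCholeskyFactorSymbolic(F, A, rperm, &info));
  PetscCall(MatCholeskyFactorNumeric(F, A, &info));
  PetscCall(MatGetInertia(F, &nneg, &nzero, &npos));
  PetscCall(MatDestroy(&F));
  PetscCall(ISDestroy(&rperm));
  PetscCall(ISDestroy(&cperm));
.ve
*/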
9118: /*@C
9119: MatSolves - Solves $A x = b$, given a factored matrix, for a collection of vectors
9121: Neighbor-wise Collective
9123: Input Parameters:
9124: + mat - the factored matrix obtained with `MatGetFactor()`
9125: - b - the right-hand-side vectors
9127: Output Parameter:
9128: . x - the result vectors
9130: Level: developer
9132: Note:
9133: The vectors `b` and `x` cannot be the same. I.e., one cannot
9134: call `MatSolves`(A,x,x).
9136: .seealso: [](ch_matrices), `Mat`, `Vecs`, `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
9137: @*/
9138: PetscErrorCode MatSolves(Mat mat, Vecs b, Vecs x)
9139: {
9140: PetscFunctionBegin;
9143: PetscCheck(x != b, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_IDN, "x and b must be different vectors");
9144: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Unfactored matrix");
9145: if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(PETSC_SUCCESS);
9147: MatCheckPreallocated(mat, 1);
9148: PetscCall(PetscLogEventBegin(MAT_Solves, mat, 0, 0, 0));
9149: PetscUseTypeMethod(mat, solves, b, x);
9150: PetscCall(PetscLogEventEnd(MAT_Solves, mat, 0, 0, 0));
9151: PetscFunctionReturn(PETSC_SUCCESS);
9152: }
9154: /*@
9155: MatIsSymmetric - Test whether a matrix is symmetric
9157: Collective
9159: Input Parameters:
9160: + A - the matrix to test
9161: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
9163: Output Parameter:
9164: . flg - the result
9166: Level: intermediate
9168: Notes:
9169: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9171:   If the matrix does not yet know whether it is symmetric this can be an expensive operation; see also `MatIsSymmetricKnown()`
9173: One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9174:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9176: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`,
9177: `MAT_SYMMETRIC`, `MAT_SYMMETRY_ETERNAL`
9178: @*/
9179: PetscErrorCode MatIsSymmetric(Mat A, PetscReal tol, PetscBool *flg)
9180: {
9181: PetscFunctionBegin;
9183: PetscAssertPointer(flg, 3);
9185: if (A->symmetric == PETSC_BOOL3_TRUE) *flg = PETSC_TRUE;
9186: else if (A->symmetric == PETSC_BOOL3_FALSE) *flg = PETSC_FALSE;
9187: else {
9188: PetscUseTypeMethod(A, issymmetric, tol, flg);
9189: if (!tol) PetscCall(MatSetOption(A, MAT_SYMMETRIC, *flg));
9190: }
9191: PetscFunctionReturn(PETSC_SUCCESS);
9192: }
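/*
   Example usage (a minimal sketch): assumes a hypothetical assembled matrix A; entries whose mismatch
   with the corresponding transposed entry is below 1e-10 are treated as equal, and the result is then
   recorded on the matrix so that later queries are cheap.
.vb
  PetscBool symm;

  PetscCall(MatIsSymmetric(A, 1.e-10, &symm));
  if (symm) PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
.ve
*/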
9194: /*@
9195: MatIsHermitian - Test whether a matrix is Hermitian
9197: Collective
9199: Input Parameters:
9200: + A - the matrix to test
9201: - tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9203: Output Parameter:
9204: . flg - the result
9206: Level: intermediate
9208: Notes:
9209: For real numbers `MatIsSymmetric()` and `MatIsHermitian()` return identical results
9211:   If the matrix does not yet know whether it is Hermitian this can be an expensive operation; see also `MatIsHermitianKnown()`
9213: One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9214:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9216: .seealso: [](ch_matrices), `Mat`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitianKnown()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9217: `MatIsSymmetricKnown()`, `MatIsSymmetric()`, `MAT_HERMITIAN`, `MAT_SYMMETRY_ETERNAL`
9218: @*/
9219: PetscErrorCode MatIsHermitian(Mat A, PetscReal tol, PetscBool *flg)
9220: {
9221: PetscFunctionBegin;
9223: PetscAssertPointer(flg, 3);
9225: if (A->hermitian == PETSC_BOOL3_TRUE) *flg = PETSC_TRUE;
9226: else if (A->hermitian == PETSC_BOOL3_FALSE) *flg = PETSC_FALSE;
9227: else {
9228: PetscUseTypeMethod(A, ishermitian, tol, flg);
9229: if (!tol) PetscCall(MatSetOption(A, MAT_HERMITIAN, *flg));
9230: }
9231: PetscFunctionReturn(PETSC_SUCCESS);
9232: }
9234: /*@
9235: MatIsSymmetricKnown - Checks if a matrix knows if it is symmetric or not and its symmetric state
9237: Not Collective
9239: Input Parameter:
9240: . A - the matrix to check
9242: Output Parameters:
9243: + set - `PETSC_TRUE` if the matrix knows its symmetry state (this tells you if the next flag is valid)
9244: - flg - the result (only valid if set is `PETSC_TRUE`)
9246: Level: advanced
9248: Notes:
9249: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsSymmetric()`
9250:   if you want it explicitly checked.
9252:   One can declare that a matrix is symmetric with `MatSetOption`(mat,`MAT_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain symmetric
9253:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9255: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9256: @*/
9257: PetscErrorCode MatIsSymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9258: {
9259: PetscFunctionBegin;
9261: PetscAssertPointer(set, 2);
9262: PetscAssertPointer(flg, 3);
9263: if (A->symmetric != PETSC_BOOL3_UNKNOWN) {
9264: *set = PETSC_TRUE;
9265: *flg = PetscBool3ToBool(A->symmetric);
9266: } else {
9267: *set = PETSC_FALSE;
9268: }
9269: PetscFunctionReturn(PETSC_SUCCESS);
9270: }
9272: /*@
9273: MatIsSPDKnown - Checks if a matrix knows if it is symmetric positive definite or not and its symmetric positive definite state
9275: Not Collective
9277: Input Parameter:
9278: . A - the matrix to check
9280: Output Parameters:
9281: + set - `PETSC_TRUE` if the matrix knows its symmetric positive definite state (this tells you if the next flag is valid)
9282: - flg - the result (only valid if set is `PETSC_TRUE`)
9284: Level: advanced
9286: Notes:
9287: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`).
9289: One can declare that a matrix is SPD with `MatSetOption`(mat,`MAT_SPD`,`PETSC_TRUE`) and if it is known to remain SPD
9290:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SPD_ETERNAL`,`PETSC_TRUE`)
9292: .seealso: [](ch_matrices), `Mat`, `MAT_SPD_ETERNAL`, `MAT_SPD`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9293: @*/
9294: PetscErrorCode MatIsSPDKnown(Mat A, PetscBool *set, PetscBool *flg)
9295: {
9296: PetscFunctionBegin;
9298: PetscAssertPointer(set, 2);
9299: PetscAssertPointer(flg, 3);
9300: if (A->spd != PETSC_BOOL3_UNKNOWN) {
9301: *set = PETSC_TRUE;
9302: *flg = PetscBool3ToBool(A->spd);
9303: } else {
9304: *set = PETSC_FALSE;
9305: }
9306: PetscFunctionReturn(PETSC_SUCCESS);
9307: }
9309: /*@
9310: MatIsHermitianKnown - Checks if a matrix knows if it is Hermitian or not and its Hermitian state
9312: Not Collective
9314: Input Parameter:
9315: . A - the matrix to check
9317: Output Parameters:
9318: + set - `PETSC_TRUE` if the matrix knows its Hermitian state (this tells you if the next flag is valid)
9319: - flg - the result (only valid if set is `PETSC_TRUE`)
9321: Level: advanced
9323: Notes:
9324: Does not check the matrix values directly, so this may return unknown (set = `PETSC_FALSE`). Use `MatIsHermitian()`
9325:   if you want it explicitly checked.
9327:   One can declare that a matrix is Hermitian with `MatSetOption`(mat,`MAT_HERMITIAN`,`PETSC_TRUE`) and if it is known to remain Hermitian
9328:   after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9330: .seealso: [](ch_matrices), `Mat`, `MAT_SYMMETRY_ETERNAL`, `MAT_HERMITIAN`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9331: @*/
9332: PetscErrorCode MatIsHermitianKnown(Mat A, PetscBool *set, PetscBool *flg)
9333: {
9334: PetscFunctionBegin;
9336: PetscAssertPointer(set, 2);
9337: PetscAssertPointer(flg, 3);
9338: if (A->hermitian != PETSC_BOOL3_UNKNOWN) {
9339: *set = PETSC_TRUE;
9340: *flg = PetscBool3ToBool(A->hermitian);
9341: } else {
9342: *set = PETSC_FALSE;
9343: }
9344: PetscFunctionReturn(PETSC_SUCCESS);
9345: }
9347: /*@
9348: MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9350: Collective
9352: Input Parameter:
9353: . A - the matrix to test
9355: Output Parameter:
9356: . flg - the result
9358: Level: intermediate
9360: Notes:
9361:   If the matrix does not yet know whether it is structurally symmetric this can be an expensive operation; see also `MatIsStructurallySymmetricKnown()`
9363: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9364:   symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9366: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MAT_STRUCTURAL_SYMMETRY_ETERNAL`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`, `MatIsStructurallySymmetricKnown()`
9367: @*/
9368: PetscErrorCode MatIsStructurallySymmetric(Mat A, PetscBool *flg)
9369: {
9370: PetscFunctionBegin;
9372: PetscAssertPointer(flg, 2);
9373: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9374: *flg = PetscBool3ToBool(A->structurally_symmetric);
9375: } else {
9376: PetscUseTypeMethod(A, isstructurallysymmetric, flg);
9377: PetscCall(MatSetOption(A, MAT_STRUCTURALLY_SYMMETRIC, *flg));
9378: }
9379: PetscFunctionReturn(PETSC_SUCCESS);
9380: }
9382: /*@
9383: MatIsStructurallySymmetricKnown - Checks if a matrix knows if it is structurally symmetric or not and its structurally symmetric state
9385: Not Collective
9387: Input Parameter:
9388: . A - the matrix to check
9390: Output Parameters:
9391: + set - `PETSC_TRUE` if the matrix knows its structurally symmetric state (this tells you if the next flag is valid)
9392: - flg - the result (only valid if set is `PETSC_TRUE`)
9394: Level: advanced
9396: Notes:
9397: One can declare that a matrix is structurally symmetric with `MatSetOption`(mat,`MAT_STRUCTURALLY_SYMMETRIC`,`PETSC_TRUE`) and if it is known to remain structurally
9398:   symmetric after changes to the matrix's values one can call `MatSetOption`(mat,`MAT_STRUCTURAL_SYMMETRY_ETERNAL`,`PETSC_TRUE`)
9400: Use `MatIsStructurallySymmetric()` to explicitly check if a matrix is structurally symmetric (this is an expensive operation)
9402: .seealso: [](ch_matrices), `Mat`, `MAT_STRUCTURALLY_SYMMETRIC`, `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`, `MatIsHermitianKnown()`
9403: @*/
9404: PetscErrorCode MatIsStructurallySymmetricKnown(Mat A, PetscBool *set, PetscBool *flg)
9405: {
9406: PetscFunctionBegin;
9408: PetscAssertPointer(set, 2);
9409: PetscAssertPointer(flg, 3);
9410: if (A->structurally_symmetric != PETSC_BOOL3_UNKNOWN) {
9411: *set = PETSC_TRUE;
9412: *flg = PetscBool3ToBool(A->structurally_symmetric);
9413: } else {
9414: *set = PETSC_FALSE;
9415: }
9416: PetscFunctionReturn(PETSC_SUCCESS);
9417: }
9419: /*@
9420: MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9421: to be communicated to other processors during the `MatAssemblyBegin()`/`MatAssemblyEnd()` process
9423: Not Collective
9425: Input Parameter:
9426: . mat - the matrix
9428: Output Parameters:
9429: + nstash - the size of the stash
9430: . reallocs - the number of additional mallocs incurred.
9431: . bnstash - the size of the block stash
9432: - breallocs - the number of additional mallocs incurred in the block stash
9434: Level: advanced
9436: .seealso: [](ch_matrices), `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9437: @*/
9438: PetscErrorCode MatStashGetInfo(Mat mat, PetscInt *nstash, PetscInt *reallocs, PetscInt *bnstash, PetscInt *breallocs)
9439: {
9440: PetscFunctionBegin;
9441: PetscCall(MatStashGetInfo_Private(&mat->stash, nstash, reallocs));
9442: PetscCall(MatStashGetInfo_Private(&mat->bstash, bnstash, breallocs));
9443: PetscFunctionReturn(PETSC_SUCCESS);
9444: }
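/*
   Example usage (a minimal sketch): assumes a hypothetical parallel matrix A that has just been
   assembled; the stash statistics can guide a later call to MatStashSetInitialSize().
.vb
  PetscInt nstash, reallocs, bnstash, breallocs;

  PetscCall(MatStashGetInfo(A, &nstash, &reallocs, &bnstash, &breallocs));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "stash: %" PetscInt_FMT " entries, %" PetscInt_FMT " mallocs\n", nstash, reallocs));
.ve
*/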
9446: /*@C
9447:   MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9448:   parallel layout (`PetscLayout`) as the columns (for the right vector) and rows (for the left vector) of the matrix
9450: Collective
9452: Input Parameter:
9453: . mat - the matrix
9455: Output Parameters:
9456: + right - (optional) vector that the matrix can be multiplied against
9457: - left - (optional) vector that the matrix vector product can be stored in
9459: Level: advanced
9461: Notes:
9462: The blocksize of the returned vectors is determined by the row and column block sizes set with `MatSetBlockSizes()` or the single blocksize (same for both) set by `MatSetBlockSize()`.
9464:   These are new vectors which are not owned by the mat; they should be destroyed with `VecDestroy()` when no longer needed
9466: .seealso: [](ch_matrices), `Mat`, `Vec`, `VecCreate()`, `VecDestroy()`, `DMCreateGlobalVector()`
9467: @*/
9468: PetscErrorCode MatCreateVecs(Mat mat, Vec *right, Vec *left)
9469: {
9470: PetscFunctionBegin;
9473: if (mat->ops->getvecs) {
9474: PetscUseTypeMethod(mat, getvecs, right, left);
9475: } else {
9476: if (right) {
9477: PetscCheck(mat->cmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for columns not yet setup");
9478: PetscCall(VecCreateWithLayout_Private(mat->cmap, right));
9479: PetscCall(VecSetType(*right, mat->defaultvectype));
9480: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9481: if (mat->boundtocpu && mat->bindingpropagates) {
9482: PetscCall(VecSetBindingPropagates(*right, PETSC_TRUE));
9483: PetscCall(VecBindToCPU(*right, PETSC_TRUE));
9484: }
9485: #endif
9486: }
9487: if (left) {
9488: PetscCheck(mat->rmap->n >= 0, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "PetscLayout for rows not yet setup");
9489: PetscCall(VecCreateWithLayout_Private(mat->rmap, left));
9490: PetscCall(VecSetType(*left, mat->defaultvectype));
9491: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
9492: if (mat->boundtocpu && mat->bindingpropagates) {
9493: PetscCall(VecSetBindingPropagates(*left, PETSC_TRUE));
9494: PetscCall(VecBindToCPU(*left, PETSC_TRUE));
9495: }
9496: #endif
9497: }
9498: }
9499: PetscFunctionReturn(PETSC_SUCCESS);
9500: }
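/*
   Example usage (a minimal sketch): assumes a hypothetical matrix A; the vectors obtained here have
   layouts (and GPU vector type, if any) compatible with A and are used for a matrix-vector product.
.vb
  Vec x, y;

  PetscCall(MatCreateVecs(A, &x, &y));
  PetscCall(VecSet(x, 1.0));
  PetscCall(MatMult(A, x, y));
  PetscCall(VecDestroy(&x));
  PetscCall(VecDestroy(&y));
.ve
*/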
9502: /*@C
9503: MatFactorInfoInitialize - Initializes a `MatFactorInfo` data structure
9504: with default values.
9506: Not Collective
9508: Input Parameter:
9509: . info - the `MatFactorInfo` data structure
9511: Level: developer
9513: Notes:
9514: The solvers are generally used through the `KSP` and `PC` objects, for example
9515: `PCLU`, `PCILU`, `PCCHOLESKY`, `PCICC`
9517: Once the data structure is initialized one may change certain entries as desired for the particular factorization to be performed
9519: Developer Note:
9520: The Fortran interface is not autogenerated as the
9521: interface definition cannot be generated correctly [due to `MatFactorInfo`]
9523: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorInfo`
9524: @*/
9525: PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9526: {
9527: PetscFunctionBegin;
9528: PetscCall(PetscMemzero(info, sizeof(MatFactorInfo)));
9529: PetscFunctionReturn(PETSC_SUCCESS);
9530: }
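/*
   Example usage (a minimal sketch): assumes a hypothetical sequential AIJ matrix A; the info structure
   is zeroed and only the entries relevant to an ILU(1) factorization are then set by hand.
.vb
  Mat           F;
  MatFactorInfo info;
  IS            rperm, cperm;

  PetscCall(MatFactorInfoInitialize(&info));
  info.fill   = 2.0;   // expected ratio nnz(factor)/nnz(A)
  info.levels = 1;     // ILU(1)
  PetscCall(MatGetOrdering(A, MATORDERINGNATURAL, &rperm, &cperm));
  PetscCall(MatGetFactor(A, MATSOLVERPETSC, MAT_FACTOR_ILU, &F));
  PetscCall(MatILUFactorSymbolic(F, A, rperm, cperm, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  // ... use F with MatSolve(), then destroy F, rperm, and cperm ...
.ve
*/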
9532: /*@
9533: MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9535: Collective
9537: Input Parameters:
9538: + mat - the factored matrix
9539: - is - the index set defining the Schur indices (0-based)
9541: Level: advanced
9543: Notes:
9544: Call `MatFactorSolveSchurComplement()` or `MatFactorSolveSchurComplementTranspose()` after this call to solve a Schur complement system.
9546: You can call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` after this call.
9548: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9550: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9551: `MatFactorSolveSchurComplementTranspose()`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9552: @*/
9553: PetscErrorCode MatFactorSetSchurIS(Mat mat, IS is)
9554: {
9555: PetscErrorCode (*f)(Mat, IS);
9557: PetscFunctionBegin;
9562: PetscCheckSameComm(mat, 1, is, 2);
9563: PetscCheck(mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Only for factored matrix");
9564: PetscCall(PetscObjectQueryFunction((PetscObject)mat, "MatFactorSetSchurIS_C", &f));
9565: PetscCheck(f, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9566: PetscCall(MatDestroy(&mat->schur));
9567: PetscCall((*f)(mat, is));
9568: PetscCheck(mat->schur, PetscObjectComm((PetscObject)mat), PETSC_ERR_PLIB, "Schur complement has not been created");
9569: PetscFunctionReturn(PETSC_SUCCESS);
9570: }
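/*
   Example usage (a minimal sketch of the Schur complement workflow): assumes PETSc was configured with
   MUMPS, a hypothetical assembled matrix A, an index set schur_is holding the (0-based) global indices
   of the Schur rows/columns, and vectors rhs and sol of the Schur size.
.vb
  Mat           F;
  MatFactorInfo info;

  PetscCall(MatFactorInfoInitialize(&info));
  PetscCall(MatGetFactor(A, MATSOLVERMUMPS, MAT_FACTOR_LU, &F));
  PetscCall(MatFactorSetSchurIS(F, schur_is));
  PetscCall(MatLUFactorSymbolic(F, A, NULL, NULL, &info));
  PetscCall(MatLUFactorNumeric(F, A, &info));
  PetscCall(MatFactorSolveSchurComplement(F, rhs, sol));
  PetscCall(MatDestroy(&F));
.ve
*/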
9572: /*@
9573: MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9575: Logically Collective
9577: Input Parameters:
9578: + F - the factored matrix obtained by calling `MatGetFactor()`
9579: . S - location where to return the Schur complement, can be `NULL`
9580: - status - the status of the Schur complement matrix, can be `NULL`
9582: Level: advanced
9584: Notes:
9585: You must call `MatFactorSetSchurIS()` before calling this routine.
9587: This functionality is only supported for `MATSOLVERMUMPS` and `MATSOLVERMKL_PARDISO`
9589: The routine provides a copy of the Schur matrix stored within the solver data structures.
9590: The caller must destroy the object when it is no longer needed.
9591:   If `MatFactorInvertSchurComplement()` has been called, the routine returns the inverse of the Schur complement.
9593: Use `MatFactorGetSchurComplement()` to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9595: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9597: Developer Note:
9598: The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9599: matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9601: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`, `MATSOLVERMUMPS`, `MATSOLVERMKL_PARDISO`
9602: @*/
9603: PetscErrorCode MatFactorCreateSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9604: {
9605: PetscFunctionBegin;
9607: if (S) PetscAssertPointer(S, 2);
9608: if (status) PetscAssertPointer(status, 3);
9609: if (S) {
9610: PetscErrorCode (*f)(Mat, Mat *);
9612: PetscCall(PetscObjectQueryFunction((PetscObject)F, "MatFactorCreateSchurComplement_C", &f));
9613: if (f) {
9614: PetscCall((*f)(F, S));
9615: } else {
9616: PetscCall(MatDuplicate(F->schur, MAT_COPY_VALUES, S));
9617: }
9618: }
9619: if (status) *status = F->schur_status;
9620: PetscFunctionReturn(PETSC_SUCCESS);
9621: }
9623: /*@
9624: MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix
9626: Logically Collective
9628: Input Parameters:
9629: + F - the factored matrix obtained by calling `MatGetFactor()`
9630: . S - location where to return the Schur complement, can be `NULL`
9631: - status - the status of the Schur complement matrix, can be `NULL`
9633: Level: advanced
9635: Notes:
9636: You must call `MatFactorSetSchurIS()` before calling this routine.
9638:   Schur complement mode is currently implemented for sequential matrices with solver type `MATSOLVERMUMPS`
9640:   The routine returns the Schur complement stored within the data structures of the solver.
9642: If `MatFactorInvertSchurComplement()` has previously been called, the returned matrix is actually the inverse of the Schur complement.
9644: The returned matrix should not be destroyed; the caller should call `MatFactorRestoreSchurComplement()` when the object is no longer needed.
9646: Use `MatFactorCreateSchurComplement()` to create a copy of the Schur complement matrix that is within a factored matrix
9648: See `MatCreateSchurComplement()` or `MatGetSchurComplement()` for ways to create virtual or approximate Schur complements.
9650: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9651: @*/
9652: PetscErrorCode MatFactorGetSchurComplement(Mat F, Mat *S, MatFactorSchurStatus *status)
9653: {
9654: PetscFunctionBegin;
9656: if (S) {
9657: PetscAssertPointer(S, 2);
9658: *S = F->schur;
9659: }
9660: if (status) {
9661: PetscAssertPointer(status, 3);
9662: *status = F->schur_status;
9663: }
9664: PetscFunctionReturn(PETSC_SUCCESS);
9665: }
9667: static PetscErrorCode MatFactorUpdateSchurStatus_Private(Mat F)
9668: {
9669: Mat S = F->schur;
9671: PetscFunctionBegin;
9672: switch (F->schur_status) {
9673: case MAT_FACTOR_SCHUR_UNFACTORED: // fall-through
9674: case MAT_FACTOR_SCHUR_INVERTED:
9675: if (S) {
9676: S->ops->solve = NULL;
9677: S->ops->matsolve = NULL;
9678: S->ops->solvetranspose = NULL;
9679: S->ops->matsolvetranspose = NULL;
9680: S->ops->solveadd = NULL;
9681: S->ops->solvetransposeadd = NULL;
9682: S->factortype = MAT_FACTOR_NONE;
9683: PetscCall(PetscFree(S->solvertype));
9684: }
9685: case MAT_FACTOR_SCHUR_FACTORED: // fall-through
9686: break;
9687: default:
9688: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9689: }
9690: PetscFunctionReturn(PETSC_SUCCESS);
9691: }
9693: /*@
9694: MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to `MatFactorGetSchurComplement()`
9696: Logically Collective
9698: Input Parameters:
9699: + F - the factored matrix obtained by calling `MatGetFactor()`
9700: . S - location where the Schur complement is stored
9701: - status - the status of the Schur complement matrix (see `MatFactorSchurStatus`)
9703: Level: advanced
9705: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
9706: @*/
9707: PetscErrorCode MatFactorRestoreSchurComplement(Mat F, Mat *S, MatFactorSchurStatus status)
9708: {
9709: PetscFunctionBegin;
9711: if (S) {
9713: *S = NULL;
9714: }
9715: F->schur_status = status;
9716: PetscCall(MatFactorUpdateSchurStatus_Private(F));
9717: PetscFunctionReturn(PETSC_SUCCESS);
9718: }
9720: /*@
9721: MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9723: Logically Collective
9725: Input Parameters:
9726: + F - the factored matrix obtained by calling `MatGetFactor()`
9727: . rhs - location where the right hand side of the Schur complement system is stored
9728: - sol - location where the solution of the Schur complement system has to be returned
9730: Level: advanced
9732: Notes:
9733: The sizes of the vectors should match the size of the Schur complement
9735: Must be called after `MatFactorSetSchurIS()`
9737: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9738: @*/
9739: PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9740: {
9741: PetscFunctionBegin;
9748: PetscCheckSameComm(F, 1, rhs, 2);
9749: PetscCheckSameComm(F, 1, sol, 3);
9750: PetscCall(MatFactorFactorizeSchurComplement(F));
9751: switch (F->schur_status) {
9752: case MAT_FACTOR_SCHUR_FACTORED:
9753: PetscCall(MatSolveTranspose(F->schur, rhs, sol));
9754: break;
9755: case MAT_FACTOR_SCHUR_INVERTED:
9756: PetscCall(MatMultTranspose(F->schur, rhs, sol));
9757: break;
9758: default:
9759: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9760: }
9761: PetscFunctionReturn(PETSC_SUCCESS);
9762: }
9764: /*@
9765: MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9767: Logically Collective
9769: Input Parameters:
9770: + F - the factored matrix obtained by calling `MatGetFactor()`
9771: . rhs - location where the right hand side of the Schur complement system is stored
9772: - sol - location where the solution of the Schur complement system has to be returned
9774: Level: advanced
9776: Notes:
9777: The sizes of the vectors should match the size of the Schur complement
9779: Must be called after `MatFactorSetSchurIS()`
9781: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9782: @*/
9783: PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9784: {
9785: PetscFunctionBegin;
9792: PetscCheckSameComm(F, 1, rhs, 2);
9793: PetscCheckSameComm(F, 1, sol, 3);
9794: PetscCall(MatFactorFactorizeSchurComplement(F));
9795: switch (F->schur_status) {
9796: case MAT_FACTOR_SCHUR_FACTORED:
9797: PetscCall(MatSolve(F->schur, rhs, sol));
9798: break;
9799: case MAT_FACTOR_SCHUR_INVERTED:
9800: PetscCall(MatMult(F->schur, rhs, sol));
9801: break;
9802: default:
9803: SETERRQ(PetscObjectComm((PetscObject)F), PETSC_ERR_SUP, "Unhandled MatFactorSchurStatus %d", F->schur_status);
9804: }
9805: PetscFunctionReturn(PETSC_SUCCESS);
9806: }
9808: PETSC_EXTERN PetscErrorCode MatSeqDenseInvertFactors_Private(Mat);
9809: #if PetscDefined(HAVE_CUDA)
9810: PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqDenseCUDAInvertFactors_Internal(Mat);
9811: #endif
9813: /* Schur status updated in the interface */
9814: static PetscErrorCode MatFactorInvertSchurComplement_Private(Mat F)
9815: {
9816: Mat S = F->schur;
9818: PetscFunctionBegin;
9819: if (S) {
9820: PetscMPIInt size;
9821: PetscBool isdense, isdensecuda;
9823: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)S), &size));
9824: PetscCheck(size <= 1, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not yet implemented");
9825: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSE, &isdense));
9826: PetscCall(PetscObjectTypeCompare((PetscObject)S, MATSEQDENSECUDA, &isdensecuda));
9827: PetscCheck(isdense || isdensecuda, PetscObjectComm((PetscObject)S), PETSC_ERR_SUP, "Not implemented for type %s", ((PetscObject)S)->type_name);
9828: PetscCall(PetscLogEventBegin(MAT_FactorInvS, F, 0, 0, 0));
9829: if (isdense) {
9830: PetscCall(MatSeqDenseInvertFactors_Private(S));
9831: } else if (isdensecuda) {
9832: #if defined(PETSC_HAVE_CUDA)
9833: PetscCall(MatSeqDenseCUDAInvertFactors_Internal(S));
9834: #endif
9835: }
9836: // HIP??????????????
9837: PetscCall(PetscLogEventEnd(MAT_FactorInvS, F, 0, 0, 0));
9838: }
9839: PetscFunctionReturn(PETSC_SUCCESS);
9840: }
9842: /*@
9843: MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
9845: Logically Collective
9847: Input Parameter:
9848: . F - the factored matrix obtained by calling `MatGetFactor()`
9850: Level: advanced
9852: Notes:
9853: Must be called after `MatFactorSetSchurIS()`.
9855: Call `MatFactorGetSchurComplement()` or `MatFactorCreateSchurComplement()` AFTER this call to actually compute the inverse and get access to it.
9857: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
9858: @*/
9859: PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9860: {
9861: PetscFunctionBegin;
9864: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(PETSC_SUCCESS);
9865: PetscCall(MatFactorFactorizeSchurComplement(F));
9866: PetscCall(MatFactorInvertSchurComplement_Private(F));
9867: F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9868: PetscFunctionReturn(PETSC_SUCCESS);
9869: }
9871: /*@
9872: MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
9874: Logically Collective
9876: Input Parameter:
9877: . F - the factored matrix obtained by calling `MatGetFactor()`
9879: Level: advanced
9881: Note:
9882: Must be called after `MatFactorSetSchurIS()`
9884: .seealso: [](ch_matrices), `Mat`, `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
9885: @*/
9886: PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
9887: {
9888: MatFactorInfo info;
9890: PetscFunctionBegin;
9893: if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(PETSC_SUCCESS);
9894: PetscCall(PetscLogEventBegin(MAT_FactorFactS, F, 0, 0, 0));
9895: PetscCall(PetscMemzero(&info, sizeof(MatFactorInfo)));
9896: if (F->factortype == MAT_FACTOR_CHOLESKY) { /* LDL^t regarded as Cholesky */
9897: PetscCall(MatCholeskyFactor(F->schur, NULL, &info));
9898: } else {
9899: PetscCall(MatLUFactor(F->schur, NULL, NULL, &info));
9900: }
9901: PetscCall(PetscLogEventEnd(MAT_FactorFactS, F, 0, 0, 0));
9902: F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
9903: PetscFunctionReturn(PETSC_SUCCESS);
9904: }
9906: /*@
9907: MatPtAP - Creates the matrix product $C = P^T * A * P$
9909: Neighbor-wise Collective
9911: Input Parameters:
9912: + A - the matrix
9913: . P - the projection matrix
9914: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
9915: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use `PETSC_DEFAULT` if you do not have a good estimate
9916: if the result is a dense matrix this is irrelevant
9918: Output Parameter:
9919: . C - the product matrix
9921: Level: intermediate
9923: Notes:
9924: C will be created and must be destroyed by the user with `MatDestroy()`.
9926: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
9928: Developer Note:
9929:   For matrix types without a special implementation the function falls back to `MatMatMult()` followed by `MatTransposeMatMult()`.
9931: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatRARt()`
9932: @*/
9933: PetscErrorCode MatPtAP(Mat A, Mat P, MatReuse scall, PetscReal fill, Mat *C)
9934: {
9935: PetscFunctionBegin;
9936: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
9937: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
9939: if (scall == MAT_INITIAL_MATRIX) {
9940: PetscCall(MatProductCreate(A, P, NULL, C));
9941: PetscCall(MatProductSetType(*C, MATPRODUCT_PtAP));
9942: PetscCall(MatProductSetAlgorithm(*C, "default"));
9943: PetscCall(MatProductSetFill(*C, fill));
9945: (*C)->product->api_user = PETSC_TRUE;
9946: PetscCall(MatProductSetFromOptions(*C));
9947: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and P %s", MatProductTypes[MATPRODUCT_PtAP], ((PetscObject)A)->type_name, ((PetscObject)P)->type_name);
9948: PetscCall(MatProductSymbolic(*C));
9949: } else { /* scall == MAT_REUSE_MATRIX */
9950: PetscCall(MatProductReplaceMats(A, P, NULL, *C));
9951: }
9953: PetscCall(MatProductNumeric(*C));
9954: (*C)->symmetric = A->symmetric;
9955: (*C)->spd = A->spd;
9956: PetscFunctionReturn(PETSC_SUCCESS);
9957: }
9959: /*@
9960: MatRARt - Creates the matrix product $C = R * A * R^T$
9962: Neighbor-wise Collective
9964: Input Parameters:
9965: + A - the matrix
9966: . R - the projection matrix
9967: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
9968: - fill - expected fill as ratio of nnz(C)/nnz(A), use `PETSC_DEFAULT` if you do not have a good estimate
9969: if the result is a dense matrix this is irrelevant
9971: Output Parameter:
9972: . C - the product matrix
9974: Level: intermediate
9976: Notes:
9977: C will be created and must be destroyed by the user with `MatDestroy()`.
9979: An alternative approach to this function is to use `MatProductCreate()` and set the desired options before the computation is done
9981: This routine is currently only implemented for pairs of `MATAIJ` matrices and classes
9982: which inherit from `MATAIJ`. Due to PETSc sparse matrix block row distribution among processes,
9983: parallel `MatRARt()` is implemented via an explicit transpose of `R`, which can be very expensive.
9984: We recommend using `MatPtAP()` instead.
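Example Usage (a minimal sketch, assuming `A` and `R` are already assembled `MATAIJ` matrices with compatible layouts):
.vb
  Mat C;

  PetscCall(MatRARt(A, R, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C)); // allocate and compute C = R * A * R^T
  // ... change the numerical values (but not the nonzero pattern) of A ...
  PetscCall(MatRARt(A, R, MAT_REUSE_MATRIX, PETSC_DEFAULT, &C));   // recompute C in place
  PetscCall(MatDestroy(&C));
.ve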
9986: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MatMatMult()`, `MatPtAP()`
9987: @*/
9988: PetscErrorCode MatRARt(Mat A, Mat R, MatReuse scall, PetscReal fill, Mat *C)
9989: {
9990: PetscFunctionBegin;
9991: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C, 5);
9992: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
9994: if (scall == MAT_INITIAL_MATRIX) {
9995: PetscCall(MatProductCreate(A, R, NULL, C));
9996: PetscCall(MatProductSetType(*C, MATPRODUCT_RARt));
9997: PetscCall(MatProductSetAlgorithm(*C, "default"));
9998: PetscCall(MatProductSetFill(*C, fill));
10000: (*C)->product->api_user = PETSC_TRUE;
10001: PetscCall(MatProductSetFromOptions(*C));
10002: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "MatProduct %s not supported for A %s and R %s", MatProductTypes[MATPRODUCT_RARt], ((PetscObject)A)->type_name, ((PetscObject)R)->type_name);
10003: PetscCall(MatProductSymbolic(*C));
10004: } else { /* scall == MAT_REUSE_MATRIX */
10005: PetscCall(MatProductReplaceMats(A, R, NULL, *C));
10006: }
10008: PetscCall(MatProductNumeric(*C));
10009: if (A->symmetric == PETSC_BOOL3_TRUE) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10010: PetscFunctionReturn(PETSC_SUCCESS);
10011: }
10013: static PetscErrorCode MatProduct_Private(Mat A, Mat B, MatReuse scall, PetscReal fill, MatProductType ptype, Mat *C)
10014: {
10015: PetscBool flg = PETSC_TRUE;
10017: PetscFunctionBegin;
10018: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "MAT_INPLACE_MATRIX product not supported");
10019: if (scall == MAT_INITIAL_MATRIX) {
10020: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n", MatProductTypes[ptype]));
10021: PetscCall(MatProductCreate(A, B, NULL, C));
10022: PetscCall(MatProductSetAlgorithm(*C, MATPRODUCTALGORITHMDEFAULT));
10023: PetscCall(MatProductSetFill(*C, fill));
10024: } else { /* scall == MAT_REUSE_MATRIX */
10025: Mat_Product *product = (*C)->product;
10027: PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)(*C), &flg, MATSEQDENSE, MATMPIDENSE, ""));
10028: if (flg && product && product->type != ptype) {
10029: PetscCall(MatProductClear(*C));
10030: product = NULL;
10031: }
10032: PetscCall(PetscInfo(A, "Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n", product ? "with" : "without", MatProductTypes[ptype]));
10033: if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
10034: PetscCheck(flg, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "Call MatProductCreate() first");
10035: PetscCall(MatProductCreate_Private(A, B, NULL, *C));
10036: product = (*C)->product;
10037: product->fill = fill;
10038: product->clear = PETSC_TRUE;
10039: } else { /* user may change input matrices A or B when MAT_REUSE_MATRIX */
10040: flg = PETSC_FALSE;
10041: PetscCall(MatProductReplaceMats(A, B, NULL, *C));
10042: }
10043: }
10044: if (flg) {
10045: (*C)->product->api_user = PETSC_TRUE;
10046: PetscCall(MatProductSetType(*C, ptype));
10047: PetscCall(MatProductSetFromOptions(*C));
10048: PetscCheck((*C)->ops->productsymbolic, PetscObjectComm((PetscObject)(*C)), PETSC_ERR_SUP, "MatProduct %s not supported for %s and %s", MatProductTypes[ptype], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name);
10049: PetscCall(MatProductSymbolic(*C));
10050: }
10051: PetscCall(MatProductNumeric(*C));
10052: PetscFunctionReturn(PETSC_SUCCESS);
10053: }
10055: /*@
10056: MatMatMult - Performs matrix-matrix multiplication $C = A*B$.
10058: Neighbor-wise Collective
10060: Input Parameters:
10061: + A - the left matrix
10062: . B - the right matrix
10063: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10064: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if you do not have a good estimate
10065: if the result is a dense matrix this is irrelevant
10067: Output Parameter:
10068: . C - the product matrix
10070: Notes:
10071: Unless `scall` is `MAT_REUSE_MATRIX`, `C` will be created.
10073: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
10074: call to this function with `MAT_INITIAL_MATRIX`.
10076: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value actually needed.
10078: In the special case where matrix `B` (and hence `C`) is dense, you can create the correctly sized matrix `C` yourself and then call this routine with `MAT_REUSE_MATRIX`,
10079: rather than first having `MatMatMult()` create it for you. You can NEVER do this if the matrix `C` is sparse.
10081: Example of Usage:
10082: .vb
10083: MatProductCreate(A,B,NULL,&C);
10084: MatProductSetType(C,MATPRODUCT_AB);
MatProductSetFromOptions(C); // select a product implementation before the symbolic phase
10085: MatProductSymbolic(C);
10086: MatProductNumeric(C); // compute C=A * B
10087: MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
10088: MatProductNumeric(C);
10089: MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
10090: MatProductNumeric(C);
10091: .ve
10093: Level: intermediate
10095: .seealso: [](ch_matrices), `Mat`, `MatProductType`, `MATPRODUCT_AB`, `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
10096: @*/
10097: PetscErrorCode MatMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10098: {
10099: PetscFunctionBegin;
10100: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AB, C));
10101: PetscFunctionReturn(PETSC_SUCCESS);
10102: }
10104: /*@
10105: MatMatTransposeMult - Performs matrix-matrix multiplication $C = A*B^T$.
10107: Neighbor-wise Collective
10109: Input Parameters:
10110: + A - the left matrix
10111: . B - the right matrix
10112: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10113: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10115: Output Parameter:
10116: . C - the product matrix
10118: Options Database Key:
10119: . -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for `MATMPIDENSE` matrices: the
10120: first redundantly copies the transposed `B` matrix on each process and requires O(log P) communication complexity;
10121: the second never stores more than one portion of the `B` matrix at a time but requires O(P) communication complexity.
10123: Level: intermediate
10125: Notes:
10126: C will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10128: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call
10130: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10131: actually needed.
10133: This routine is currently only implemented for pairs of `MATSEQAIJ` matrices, for the `MATSEQDENSE` class,
10134: and for pairs of `MATMPIDENSE` matrices.
10136: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABt`
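Example Usage (a minimal sketch, assuming `A` and `B` are already assembled `MATSEQAIJ` matrices with the same number of columns):
.vb
  Mat C;

  PetscCall(MatMatTransposeMult(A, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C)); // C = A * B^T
  PetscCall(MatDestroy(&C));
.ve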
10138: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABt`, `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`, `MatProductAlgorithm`, `MatProductType`
10139: @*/
10140: PetscErrorCode MatMatTransposeMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10141: {
10142: PetscFunctionBegin;
10143: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_ABt, C));
10144: if (A == B) PetscCall(MatSetOption(*C, MAT_SYMMETRIC, PETSC_TRUE));
10145: PetscFunctionReturn(PETSC_SUCCESS);
10146: }
10148: /*@
10149: MatTransposeMatMult - Performs matrix-matrix multiplication $C = A^T*B$.
10151: Neighbor-wise Collective
10153: Input Parameters:
10154: + A - the left matrix
10155: . B - the right matrix
10156: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10157: - fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use `PETSC_DEFAULT` if not known
10159: Output Parameter:
10160: . C - the product matrix
10162: Level: intermediate
10164: Notes:
10165: `C` will be created if `MAT_INITIAL_MATRIX` and must be destroyed by the user with `MatDestroy()`.
10167: `MAT_REUSE_MATRIX` can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
10169: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_AtB`
10171: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10172: actually needed.
10174: This routine is currently implemented for pairs of `MATAIJ` matrices and pairs of `MATSEQDENSE` matrices and classes
10175: which inherit from `MATSEQAIJ`. `C` will be of the same type as the input matrices.
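Example Usage (a minimal sketch, assuming `A` and `B` are already assembled `MATAIJ` matrices with the same row layout):
.vb
  Mat C;

  PetscCall(MatTransposeMatMult(A, B, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &C)); // C = A^T * B
  // ... change the numerical values (but not the nonzero pattern) of A or B ...
  PetscCall(MatTransposeMatMult(A, B, MAT_REUSE_MATRIX, PETSC_DEFAULT, &C));   // recompute C in place
  PetscCall(MatDestroy(&C));
.ve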
10177: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_AtB`, `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
10178: @*/
10179: PetscErrorCode MatTransposeMatMult(Mat A, Mat B, MatReuse scall, PetscReal fill, Mat *C)
10180: {
10181: PetscFunctionBegin;
10182: PetscCall(MatProduct_Private(A, B, scall, fill, MATPRODUCT_AtB, C));
10183: PetscFunctionReturn(PETSC_SUCCESS);
10184: }
10186: /*@
10187: MatMatMatMult - Performs matrix-matrix-matrix multiplication $D = A*B*C$.
10189: Neighbor-wise Collective
10191: Input Parameters:
10192: + A - the left matrix
10193: . B - the middle matrix
10194: . C - the right matrix
10195: . scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10196: - fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use `PETSC_DEFAULT` if you do not have a good estimate
10197: if the result is a dense matrix this is irrelevant
10199: Output Parameter:
10200: . D - the product matrix
10202: Level: intermediate
10204: Notes:
10205: Unless `scall` is `MAT_REUSE_MATRIX`, `D` will be created.
10207: `MAT_REUSE_MATRIX` can only be used if the matrices `A`, `B`, and `C` have the same nonzero pattern as in the previous call
10209: This routine is shorthand for using `MatProductCreate()` with the `MatProductType` of `MATPRODUCT_ABC`
10211: To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
10212: actually needed.
10214: If you have many matrices with the same non-zero structure to multiply, you
10215: should use `MAT_REUSE_MATRIX` in all calls but the first
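Example Usage (a minimal sketch, assuming `A`, `B`, and `C` are already assembled matrices with compatible sizes):
.vb
  Mat D;

  PetscCall(MatMatMatMult(A, B, C, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &D)); // D = A * B * C
  // ... change the numerical values (but not the nonzero pattern) of A, B, or C ...
  PetscCall(MatMatMatMult(A, B, C, MAT_REUSE_MATRIX, PETSC_DEFAULT, &D));   // recompute D in place
  PetscCall(MatDestroy(&D));
.ve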
10217: .seealso: [](ch_matrices), `Mat`, `MatProductCreate()`, `MATPRODUCT_ABC`, `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
10218: @*/
10219: PetscErrorCode MatMatMatMult(Mat A, Mat B, Mat C, MatReuse scall, PetscReal fill, Mat *D)
10220: {
10221: PetscFunctionBegin;
10222: if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D, 6);
10223: PetscCheck(scall != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Inplace product not supported");
10225: if (scall == MAT_INITIAL_MATRIX) {
10226: PetscCall(MatProductCreate(A, B, C, D));
10227: PetscCall(MatProductSetType(*D, MATPRODUCT_ABC));
10228: PetscCall(MatProductSetAlgorithm(*D, "default"));
10229: PetscCall(MatProductSetFill(*D, fill));
10231: (*D)->product->api_user = PETSC_TRUE;
10232: PetscCall(MatProductSetFromOptions(*D));
10233: PetscCheck((*D)->ops->productsymbolic, PetscObjectComm((PetscObject)(*D)), PETSC_ERR_SUP, "MatProduct %s not supported for A %s, B %s and C %s", MatProductTypes[MATPRODUCT_ABC], ((PetscObject)A)->type_name, ((PetscObject)B)->type_name,
10234: ((PetscObject)C)->type_name);
10235: PetscCall(MatProductSymbolic(*D));
10236: } else { /* user may change input matrices when REUSE */
10237: PetscCall(MatProductReplaceMats(A, B, C, *D));
10238: }
10239: PetscCall(MatProductNumeric(*D));
10240: PetscFunctionReturn(PETSC_SUCCESS);
10241: }
10243: /*@
10244: MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
10246: Collective
10248: Input Parameters:
10249: + mat - the matrix
10250: . nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
10251: . subcomm - MPI communicator split from the communicator in which `mat` resides (or `MPI_COMM_NULL` if `nsubcomm` is used)
10252: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10254: Output Parameter:
10255: . matredundant - redundant matrix
10257: Level: advanced
10259: Notes:
10260: `MAT_REUSE_MATRIX` can only be used when the nonzero structure of the
10261: original matrix has not changed since the last call to `MatCreateRedundantMatrix()`.
10263: This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
10264: calling it.
10266: `PetscSubcommCreate()` can be used to manage the creation of the subcomm but need not be.
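Example Usage (a minimal sketch that lets this routine create the subcommunicators; `mat` is assumed to be assembled, and `nsub` is a hypothetical value between 1 and the communicator size):
.vb
  Mat      matred;
  PetscInt nsub = 2; // number of redundant copies, chosen for illustration only

  PetscCall(MatCreateRedundantMatrix(mat, nsub, MPI_COMM_NULL, MAT_INITIAL_MATRIX, &matred));
  // ... change the numerical values (but not the nonzero pattern) of mat ...
  PetscCall(MatCreateRedundantMatrix(mat, nsub, MPI_COMM_NULL, MAT_REUSE_MATRIX, &matred));
  PetscCall(MatDestroy(&matred));
.ve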
10268: .seealso: [](ch_matrices), `Mat`, `MatDestroy()`, `PetscSubcommCreate()`, `PetscSubcomm`
10269: @*/
10270: PetscErrorCode MatCreateRedundantMatrix(Mat mat, PetscInt nsubcomm, MPI_Comm subcomm, MatReuse reuse, Mat *matredundant)
10271: {
10272: MPI_Comm comm;
10273: PetscMPIInt size;
10274: PetscInt mloc_sub, nloc_sub, rstart, rend, M = mat->rmap->N, N = mat->cmap->N, bs = mat->rmap->bs;
10275: Mat_Redundant *redund = NULL;
10276: PetscSubcomm psubcomm = NULL;
10277: MPI_Comm subcomm_in = subcomm;
10278: Mat *matseq;
10279: IS isrow, iscol;
10280: PetscBool newsubcomm = PETSC_FALSE;
10282: PetscFunctionBegin;
10284: if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
10285: PetscAssertPointer(*matredundant, 5);
10287: }
10289: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
10290: if (size == 1 || nsubcomm == 1) {
10291: if (reuse == MAT_INITIAL_MATRIX) {
10292: PetscCall(MatDuplicate(mat, MAT_COPY_VALUES, matredundant));
10293: } else {
10294: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10295: PetscCall(MatCopy(mat, *matredundant, SAME_NONZERO_PATTERN));
10296: }
10297: PetscFunctionReturn(PETSC_SUCCESS);
10298: }
10300: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10301: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10302: MatCheckPreallocated(mat, 1);
10304: PetscCall(PetscLogEventBegin(MAT_RedundantMat, mat, 0, 0, 0));
10305: if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
10306: /* create psubcomm, then get subcomm */
10307: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10308: PetscCallMPI(MPI_Comm_size(comm, &size));
10309: PetscCheck(nsubcomm >= 1 && nsubcomm <= size, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "nsubcomm must be between 1 and %d", size);
10311: PetscCall(PetscSubcommCreate(comm, &psubcomm));
10312: PetscCall(PetscSubcommSetNumber(psubcomm, nsubcomm));
10313: PetscCall(PetscSubcommSetType(psubcomm, PETSC_SUBCOMM_CONTIGUOUS));
10314: PetscCall(PetscSubcommSetFromOptions(psubcomm));
10315: PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm), &subcomm, NULL));
10316: newsubcomm = PETSC_TRUE;
10317: PetscCall(PetscSubcommDestroy(&psubcomm));
10318: }
10320: /* get isrow, iscol and a local sequential matrix matseq[0] */
10321: if (reuse == MAT_INITIAL_MATRIX) {
10322: mloc_sub = PETSC_DECIDE;
10323: nloc_sub = PETSC_DECIDE;
10324: if (bs < 1) {
10325: PetscCall(PetscSplitOwnership(subcomm, &mloc_sub, &M));
10326: PetscCall(PetscSplitOwnership(subcomm, &nloc_sub, &N));
10327: } else {
10328: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &mloc_sub, &M));
10329: PetscCall(PetscSplitOwnershipBlock(subcomm, bs, &nloc_sub, &N));
10330: }
10331: PetscCallMPI(MPI_Scan(&mloc_sub, &rend, 1, MPIU_INT, MPI_SUM, subcomm));
10332: rstart = rend - mloc_sub;
10333: PetscCall(ISCreateStride(PETSC_COMM_SELF, mloc_sub, rstart, 1, &isrow));
10334: PetscCall(ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol));
10335: PetscCall(ISSetIdentity(iscol));
10336: } else { /* reuse == MAT_REUSE_MATRIX */
10337: PetscCheck(*matredundant != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10338: /* retrieve subcomm */
10339: PetscCall(PetscObjectGetComm((PetscObject)(*matredundant), &subcomm));
10340: redund = (*matredundant)->redundant;
10341: isrow = redund->isrow;
10342: iscol = redund->iscol;
10343: matseq = redund->matseq;
10344: }
10345: PetscCall(MatCreateSubMatrices(mat, 1, &isrow, &iscol, reuse, &matseq));
10347: /* get matredundant over subcomm */
10348: if (reuse == MAT_INITIAL_MATRIX) {
10349: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], nloc_sub, reuse, matredundant));
10351: /* create a supporting struct and attach it to C for reuse */
10352: PetscCall(PetscNew(&redund));
10353: (*matredundant)->redundant = redund;
10354: redund->isrow = isrow;
10355: redund->iscol = iscol;
10356: redund->matseq = matseq;
10357: if (newsubcomm) {
10358: redund->subcomm = subcomm;
10359: } else {
10360: redund->subcomm = MPI_COMM_NULL;
10361: }
10362: } else {
10363: PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm, matseq[0], PETSC_DECIDE, reuse, matredundant));
10364: }
10365: #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA) || defined(PETSC_HAVE_HIP)
10366: if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10367: PetscCall(MatBindToCPU(*matredundant, PETSC_TRUE));
10368: PetscCall(MatSetBindingPropagates(*matredundant, PETSC_TRUE));
10369: }
10370: #endif
10371: PetscCall(PetscLogEventEnd(MAT_RedundantMat, mat, 0, 0, 0));
10372: PetscFunctionReturn(PETSC_SUCCESS);
10373: }
10375: /*@C
10376: MatGetMultiProcBlock - Creates multiple 'parallel submatrices' from
10377: a given `Mat`. Each submatrix can span multiple MPI processes.
10379: Collective
10381: Input Parameters:
10382: + mat - the matrix
10383: . subComm - the sub communicator obtained as if by `MPI_Comm_split(PetscObjectComm((PetscObject)mat))`
10384: - scall - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10386: Output Parameter:
10387: . subMat - parallel sub-matrices each spanning a given `subcomm`
10389: Level: advanced
10391: Notes:
10392: The submatrix partition across processes is dictated by `subComm`, a
10393: communicator obtained by `MPI_Comm_split()` or via `PetscSubcommCreate()`. The `subComm`
10394: is not restricted to be grouped with consecutive original MPI processes.
10396: Due to the `MPI_Comm_split()` usage, the parallel layout of the submatrices
10397: maps directly to the layout of the original matrix [wrt the local
10398: row,col partitioning]. So the original 'DiagonalMat' naturally maps
10399: into the 'DiagonalMat' of the `subMat`, hence it is used directly from
10400: the `subMat`. However, the off-diagonal matrix loses some columns, and these are
10401: reconstructed with `MatSetValues()`.
10403: This is used by `PCBJACOBI` when a single block spans multiple MPI processes.
10405: .seealso: [](ch_matrices), `Mat`, `MatCreateRedundantMatrix()`, `MatCreateSubMatrices()`, `PCBJACOBI`
10406: @*/
10407: PetscErrorCode MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall, Mat *subMat)
10408: {
10409: PetscMPIInt commsize, subCommSize;
10411: PetscFunctionBegin;
10412: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &commsize));
10413: PetscCallMPI(MPI_Comm_size(subComm, &subCommSize));
10414: PetscCheck(subCommSize <= commsize, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_OUTOFRANGE, "CommSize %d < SubCommSize %d", commsize, subCommSize);
10416: PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10417: PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10418: PetscUseTypeMethod(mat, getmultiprocblock, subComm, scall, subMat);
10419: PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock, mat, 0, 0, 0));
10420: PetscFunctionReturn(PETSC_SUCCESS);
10421: }
10423: /*@
10424: MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10426: Not Collective
10428: Input Parameters:
10429: + mat - matrix to extract local submatrix from
10430: . isrow - local row indices for submatrix
10431: - iscol - local column indices for submatrix
10433: Output Parameter:
10434: . submat - the submatrix
10436: Level: intermediate
10438: Notes:
10439: `submat` should be disposed of with `MatRestoreLocalSubMatrix()`.
10441: Depending on the format of `mat`, the returned `submat` may not implement `MatMult()`. Its communicator may be
10442: the same as that of `mat`, it may be `PETSC_COMM_SELF`, or it may be some other sub-communicator of `mat`'s.
10444: `submat` always implements `MatSetValuesLocal()`. If `isrow` and `iscol` have the same block size, then
10445: `MatSetValuesBlockedLocal()` will also be implemented.
10447: `mat` must have had a `ISLocalToGlobalMapping` provided to it with `MatSetLocalToGlobalMapping()`.
10448: Matrices obtained with `DMCreateMatrix()` generally already have the local to global mapping provided.
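Example Usage (a minimal sketch, assuming `mat` has a local-to-global mapping set and `isrow`/`iscol` are local index sets; the indices and value below are hypothetical, for illustration only):
.vb
  Mat         sub;
  PetscInt    row = 0, col = 0;
  PetscScalar val = 1.0;

  PetscCall(MatGetLocalSubMatrix(mat, isrow, iscol, &sub));
  PetscCall(MatSetValuesLocal(sub, 1, &row, 1, &col, &val, ADD_VALUES));
  PetscCall(MatRestoreLocalSubMatrix(mat, isrow, iscol, &sub));
  // later, assemble mat with MatAssemblyBegin()/MatAssemblyEnd() as usual
.ve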
10450: .seealso: [](ch_matrices), `Mat`, `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10451: @*/
10452: PetscErrorCode MatGetLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10453: {
10454: PetscFunctionBegin;
10458: PetscCheckSameComm(isrow, 2, iscol, 3);
10459: PetscAssertPointer(submat, 4);
10460: PetscCheck(mat->rmap->mapping, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Matrix must have local to global mapping provided before this call");
10462: if (mat->ops->getlocalsubmatrix) {
10463: PetscUseTypeMethod(mat, getlocalsubmatrix, isrow, iscol, submat);
10464: } else {
10465: PetscCall(MatCreateLocalRef(mat, isrow, iscol, submat));
10466: }
10467: PetscFunctionReturn(PETSC_SUCCESS);
10468: }
10470: /*@
10471: MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering obtained with `MatGetLocalSubMatrix()`
10473: Not Collective
10475: Input Parameters:
10476: + mat - matrix to extract local submatrix from
10477: . isrow - local row indices for submatrix
10478: . iscol - local column indices for submatrix
10479: - submat - the submatrix
10481: Level: intermediate
10483: .seealso: [](ch_matrices), `Mat`, `MatGetLocalSubMatrix()`
10484: @*/
10485: PetscErrorCode MatRestoreLocalSubMatrix(Mat mat, IS isrow, IS iscol, Mat *submat)
10486: {
10487: PetscFunctionBegin;
10491: PetscCheckSameComm(isrow, 2, iscol, 3);
10492: PetscAssertPointer(submat, 4);
10495: if (mat->ops->restorelocalsubmatrix) {
10496: PetscUseTypeMethod(mat, restorelocalsubmatrix, isrow, iscol, submat);
10497: } else {
10498: PetscCall(MatDestroy(submat));
10499: }
10500: *submat = NULL;
10501: PetscFunctionReturn(PETSC_SUCCESS);
10502: }
10504: /*@
10505: MatFindZeroDiagonals - Finds all the rows of a matrix that have a zero or missing diagonal entry
10507: Collective
10509: Input Parameter:
10510: . mat - the matrix
10512: Output Parameter:
10513: . is - if any rows have zero diagonals this contains the list of them
10515: Level: developer
10517: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10518: @*/
10519: PetscErrorCode MatFindZeroDiagonals(Mat mat, IS *is)
10520: {
10521: PetscFunctionBegin;
10524: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10525: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10527: if (!mat->ops->findzerodiagonals) {
10528: Vec diag;
10529: const PetscScalar *a;
10530: PetscInt *rows;
10531: PetscInt rStart, rEnd, r, nrow = 0;
10533: PetscCall(MatCreateVecs(mat, &diag, NULL));
10534: PetscCall(MatGetDiagonal(mat, diag));
10535: PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10536: PetscCall(VecGetArrayRead(diag, &a));
10537: for (r = 0; r < rEnd - rStart; ++r)
10538: if (a[r] == 0.0) ++nrow;
10539: PetscCall(PetscMalloc1(nrow, &rows));
10540: nrow = 0;
10541: for (r = 0; r < rEnd - rStart; ++r)
10542: if (a[r] == 0.0) rows[nrow++] = r + rStart;
10543: PetscCall(VecRestoreArrayRead(diag, &a));
10544: PetscCall(VecDestroy(&diag));
10545: PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat), nrow, rows, PETSC_OWN_POINTER, is));
10546: } else {
10547: PetscUseTypeMethod(mat, findzerodiagonals, is);
10548: }
10549: PetscFunctionReturn(PETSC_SUCCESS);
10550: }
10552: /*@
10553: MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10555: Collective
10557: Input Parameter:
10558: . mat - the matrix
10560: Output Parameter:
10561: . is - contains the list of rows with off block diagonal entries
10563: Level: developer
10565: .seealso: [](ch_matrices), `Mat`, `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10566: @*/
10567: PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat, IS *is)
10568: {
10569: PetscFunctionBegin;
10572: PetscCheck(mat->assembled, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10573: PetscCheck(!mat->factortype, PetscObjectComm((PetscObject)mat), PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10575: PetscUseTypeMethod(mat, findoffblockdiagonalentries, is);
10576: PetscFunctionReturn(PETSC_SUCCESS);
10577: }
10579: /*@C
10580: MatInvertBlockDiagonal - Inverts the block diagonal entries.
10582: Collective; No Fortran Support
10584: Input Parameter:
10585: . mat - the matrix
10587: Output Parameter:
10588: . values - the block inverses in column major order (FORTRAN-like)
10590: Level: advanced
10592: Notes:
10593: The size of the blocks is determined by the block size of the matrix.
10595: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10597: The blocks all have the same size, use `MatInvertVariableBlockDiagonal()` for variable block size
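Example Usage (a minimal sketch, assuming `mat` is an assembled matrix with block size `bs`):
.vb
  const PetscScalar *vals;
  PetscInt           bs, m;

  PetscCall(MatGetBlockSize(mat, &bs));
  PetscCall(MatGetLocalSize(mat, &m, NULL));
  PetscCall(MatInvertBlockDiagonal(mat, &vals));
  // vals holds m/bs blocks of size bs*bs, each stored in column-major order;
  // the array belongs to mat and must not be freed by the caller
.ve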
10599: .seealso: [](ch_matrices), `Mat`, `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
10600: @*/
10601: PetscErrorCode MatInvertBlockDiagonal(Mat mat, const PetscScalar **values)
10602: {
10603: PetscFunctionBegin;
10605: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10606: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10607: PetscUseTypeMethod(mat, invertblockdiagonal, values);
10608: PetscFunctionReturn(PETSC_SUCCESS);
10609: }
10611: /*@C
10612: MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.
10614: Collective; No Fortran Support
10616: Input Parameters:
10617: + mat - the matrix
10618: . nblocks - the number of blocks on the process, set with `MatSetVariableBlockSizes()`
10619: - bsizes - the size of each block on the process, set with `MatSetVariableBlockSizes()`
10621: Output Parameter:
10622: . values - the block inverses in column major order (FORTRAN-like)
10624: Level: advanced
10626: Notes:
10627: Use `MatInvertBlockDiagonal()` if all blocks have the same size
10629: The blocks never overlap between two MPI processes, use `MatInvertVariableBlockEnvelope()` for that case
10631: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
10632: @*/
10633: PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat, PetscInt nblocks, const PetscInt *bsizes, PetscScalar *values)
10634: {
10635: PetscFunctionBegin;
10637: PetscCheck(mat->assembled, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for unassembled matrix");
10638: PetscCheck(!mat->factortype, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not for factored matrix");
10639: PetscUseTypeMethod(mat, invertvariableblockdiagonal, nblocks, bsizes, values);
10640: PetscFunctionReturn(PETSC_SUCCESS);
10641: }
10643: /*@
10644: MatInvertBlockDiagonalMat - Sets the values of matrix `C` to be the inverted block diagonal of matrix `A`
10646: Collective
10648: Input Parameters:
10649: + A - the matrix
10650: - C - matrix with inverted block diagonal of `A`. This matrix should be created and may have its type set.
10652: Level: advanced
10654: Note:
10655: The blocksize of the matrix is used to determine the blocks on the diagonal of `C`
10657: .seealso: [](ch_matrices), `Mat`, `MatInvertBlockDiagonal()`
10658: @*/
10659: PetscErrorCode MatInvertBlockDiagonalMat(Mat A, Mat C)
10660: {
10661: const PetscScalar *vals;
10662: PetscInt *dnnz;
10663: PetscInt m, rstart, rend, bs, i, j;
10665: PetscFunctionBegin;
10666: PetscCall(MatInvertBlockDiagonal(A, &vals));
10667: PetscCall(MatGetBlockSize(A, &bs));
10668: PetscCall(MatGetLocalSize(A, &m, NULL));
10669: PetscCall(MatSetLayouts(C, A->rmap, A->cmap));
10670: PetscCall(PetscMalloc1(m / bs, &dnnz));
10671: for (j = 0; j < m / bs; j++) dnnz[j] = 1;
10672: PetscCall(MatXAIJSetPreallocation(C, bs, dnnz, NULL, NULL, NULL));
10673: PetscCall(PetscFree(dnnz));
10674: PetscCall(MatGetOwnershipRange(C, &rstart, &rend));
10675: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_FALSE));
10676: for (i = rstart / bs; i < rend / bs; i++) PetscCall(MatSetValuesBlocked(C, 1, &i, 1, &i, &vals[(i - rstart / bs) * bs * bs], INSERT_VALUES));
10677: PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
10678: PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
10679: PetscCall(MatSetOption(C, MAT_ROW_ORIENTED, PETSC_TRUE));
10680: PetscFunctionReturn(PETSC_SUCCESS);
10681: }
10683: /*@C
10684: MatTransposeColoringDestroy - Destroys a coloring context for matrix product $C = A*B^T$ that was created
10685: via `MatTransposeColoringCreate()`.
10687: Collective
10689: Input Parameter:
10690: . c - coloring context
10692: Level: intermediate
10694: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`
10695: @*/
10696: PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
10697: {
10698: MatTransposeColoring matcolor = *c;
10700: PetscFunctionBegin;
10701: if (!matcolor) PetscFunctionReturn(PETSC_SUCCESS);
10702: if (--((PetscObject)matcolor)->refct > 0) {
10703: matcolor = NULL;
10704: PetscFunctionReturn(PETSC_SUCCESS);
10705: }
10707: PetscCall(PetscFree3(matcolor->ncolumns, matcolor->nrows, matcolor->colorforrow));
10708: PetscCall(PetscFree(matcolor->rows));
10709: PetscCall(PetscFree(matcolor->den2sp));
10710: PetscCall(PetscFree(matcolor->colorforcol));
10711: PetscCall(PetscFree(matcolor->columns));
10712: if (matcolor->brows > 0) PetscCall(PetscFree(matcolor->lstart));
10713: PetscCall(PetscHeaderDestroy(c));
10714: PetscFunctionReturn(PETSC_SUCCESS);
10715: }
10717: /*@C
10718: MatTransColoringApplySpToDen - Given a symbolic matrix product $C = A*B^T$ for which
10719: a `MatTransposeColoring` context has been created, computes a dense $B^T$ by applying
10720: `MatTransposeColoring` to sparse `B`.
10722: Collective
10724: Input Parameters:
10725: + coloring - coloring context created with `MatTransposeColoringCreate()`
10726: - B - sparse matrix
10728: Output Parameter:
10729: . Btdense - dense matrix $B^T$
10731: Level: developer
10733: Note:
10734: These are used internally for some implementations of `MatRARt()`
10736: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`
10737: @*/
10738: PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring, Mat B, Mat Btdense)
10739: {
10740: PetscFunctionBegin;
10745: PetscCall((*B->ops->transcoloringapplysptoden)(coloring, B, Btdense));
10746: PetscFunctionReturn(PETSC_SUCCESS);
10747: }
10749: /*@C
10750: MatTransColoringApplyDenToSp - Given a symbolic matrix product $C_{sp} = A*B^T$ for which
10751: a `MatTransposeColoring` context has been created and a dense matrix $C_{den} = A*B^T_{dense}$
10752: in which $B^T_{dense}$ is obtained from `MatTransColoringApplySpToDen()`, recovers the sparse matrix
10753: $C_{sp}$ from $C_{den}$.
10755: Collective
10757: Input Parameters:
10758: + matcoloring - coloring context created with `MatTransposeColoringCreate()`
10759: - Cden - matrix product of a sparse matrix and a dense matrix Btdense
10761: Output Parameter:
10762: . Csp - sparse matrix
10764: Level: developer
10766: Note:
10767: These are used internally for some implementations of `MatRARt()`
10769: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`
10770: @*/
10771: PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring, Mat Cden, Mat Csp)
10772: {
10773: PetscFunctionBegin;
10778: PetscCall((*Csp->ops->transcoloringapplydentosp)(matcoloring, Cden, Csp));
10779: PetscCall(MatAssemblyBegin(Csp, MAT_FINAL_ASSEMBLY));
10780: PetscCall(MatAssemblyEnd(Csp, MAT_FINAL_ASSEMBLY));
10781: PetscFunctionReturn(PETSC_SUCCESS);
10782: }
10784: /*@C
10785: MatTransposeColoringCreate - Creates a matrix coloring context for the matrix product $C = A*B^T$.
10787: Collective
10789: Input Parameters:
10790: + mat - the matrix product C
10791: - iscoloring - the coloring of the matrix; usually obtained with `MatColoringCreate()` or `DMCreateColoring()`
10793: Output Parameter:
10794: . color - the new coloring context
10796: Level: intermediate
10798: .seealso: [](ch_matrices), `Mat`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
10799: `MatTransColoringApplyDenToSp()`
10800: @*/
10801: PetscErrorCode MatTransposeColoringCreate(Mat mat, ISColoring iscoloring, MatTransposeColoring *color)
10802: {
10803: MatTransposeColoring c;
10804: MPI_Comm comm;
10806: PetscFunctionBegin;
10807: PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10808: PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
10809: PetscCall(PetscHeaderCreate(c, MAT_TRANSPOSECOLORING_CLASSID, "MatTransposeColoring", "Matrix product C=A*B^T via coloring", "Mat", comm, MatTransposeColoringDestroy, NULL));
10811: c->ctype = iscoloring->ctype;
10812: PetscUseTypeMethod(mat, transposecoloringcreate, iscoloring, c);
10814: *color = c;
10815: PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate, mat, 0, 0, 0));
10816: PetscFunctionReturn(PETSC_SUCCESS);
10817: }
10819: /*@
10820: MatGetNonzeroState - Returns a 64-bit integer representing the current state of nonzeros in the matrix. If the
10821: matrix has had no new nonzero locations added to (or removed from) the matrix since the previous call, then the value will be the
10822: same; otherwise it will be larger
10824: Not Collective
10826: Input Parameter:
10827: . mat - the matrix
10829: Output Parameter:
10830: . state - the current state
10832: Level: intermediate
10834: Notes:
10835: You can only compare states from two different calls to the SAME matrix; you cannot compare calls between
10836: different matrices
10838: Use `PetscObjectStateGet()` to check for changes to the numerical values in a matrix
10840: Use the result of `PetscObjectGetId()` to compare if a previously checked matrix is the same as the current matrix, do not compare object pointers.
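Example Usage (a minimal sketch, assuming `mat` is an assembled matrix that is later modified):
.vb
  PetscObjectState state0, state1;

  PetscCall(MatGetNonzeroState(mat, &state0));
  // ... insert values into mat and reassemble it ...
  PetscCall(MatGetNonzeroState(mat, &state1));
  if (state1 > state0) {
    // new nonzero locations appeared (or were removed), so anything that
    // depends on the nonzero structure of mat must be rebuilt
  }
.ve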
10842: .seealso: [](ch_matrices), `Mat`, `PetscObjectStateGet()`, `PetscObjectGetId()`
10843: @*/
10844: PetscErrorCode MatGetNonzeroState(Mat mat, PetscObjectState *state)
10845: {
10846: PetscFunctionBegin;
10848: *state = mat->nonzerostate;
10849: PetscFunctionReturn(PETSC_SUCCESS);
10850: }
10852: /*@
10853: MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10854: matrices from each processor
10856: Collective
10858: Input Parameters:
10859: + comm - the communicator the parallel matrix will live on
10860: . seqmat - the input sequential matrix
10861: . n - number of local columns (or `PETSC_DECIDE`)
10862: - reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10864: Output Parameter:
10865: . mpimat - the parallel matrix generated
10867: Level: developer
10869: Note:
10870: The number of columns of the matrix on EACH process MUST be the same.
10872: .seealso: [](ch_matrices), `Mat`
10873: @*/
10874: PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm, Mat seqmat, PetscInt n, MatReuse reuse, Mat *mpimat)
10875: {
10876: PetscMPIInt size;
10878: PetscFunctionBegin;
10879: PetscCallMPI(MPI_Comm_size(comm, &size));
10880: if (size == 1) {
10881: if (reuse == MAT_INITIAL_MATRIX) {
10882: PetscCall(MatDuplicate(seqmat, MAT_COPY_VALUES, mpimat));
10883: } else {
10884: PetscCall(MatCopy(seqmat, *mpimat, SAME_NONZERO_PATTERN));
10885: }
10886: PetscFunctionReturn(PETSC_SUCCESS);
10887: }
10889: PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10891: PetscCall(PetscLogEventBegin(MAT_Merge, seqmat, 0, 0, 0));
10892: PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm, seqmat, n, reuse, mpimat));
10893: PetscCall(PetscLogEventEnd(MAT_Merge, seqmat, 0, 0, 0));
10894: PetscFunctionReturn(PETSC_SUCCESS);
10895: }
10897: /*@
10898: MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent MPI processes' ownership ranges.
10900: Collective
10902: Input Parameters:
10903: + A - the matrix to create subdomains from
10904: - N - requested number of subdomains
10906: Output Parameters:
10907: + n - number of subdomains resulting on this MPI process
10908: - iss - `IS` list with indices of subdomains on this MPI process
10910: Level: advanced
10912: Note:
10913: The number of subdomains must be smaller than the communicator size
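Example Usage (a minimal sketch, assuming `A` lives on a communicator with more processes than the requested number of subdomains; `N = 2` is a hypothetical choice):
.vb
  PetscInt n, N = 2;
  IS      *iss;

  PetscCall(MatSubdomainsCreateCoalesce(A, N, &n, &iss));
  // ... use the n subdomains owned by this process ...
  for (PetscInt i = 0; i < n; i++) PetscCall(ISDestroy(&iss[i]));
  PetscCall(PetscFree(iss));
.ve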
10915: .seealso: [](ch_matrices), `Mat`, `IS`
10916: @*/
10917: PetscErrorCode MatSubdomainsCreateCoalesce(Mat A, PetscInt N, PetscInt *n, IS *iss[])
10918: {
10919: MPI_Comm comm, subcomm;
10920: PetscMPIInt size, rank, color;
10921: PetscInt rstart, rend, k;
10923: PetscFunctionBegin;
10924: PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
10925: PetscCallMPI(MPI_Comm_size(comm, &size));
10926: PetscCallMPI(MPI_Comm_rank(comm, &rank));
10927: PetscCheck(N >= 1 && N < (PetscInt)size, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT, size, N);
10928: *n = 1;
10929: k = ((PetscInt)size) / N + ((PetscInt)size % N > 0); /* There are up to k ranks to a color */
10930: color = rank / k;
10931: PetscCallMPI(MPI_Comm_split(comm, color, rank, &subcomm));
10932: PetscCall(PetscMalloc1(1, iss));
10933: PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
10934: PetscCall(ISCreateStride(subcomm, rend - rstart, rstart, 1, iss[0]));
10935: PetscCallMPI(MPI_Comm_free(&subcomm));
10936: PetscFunctionReturn(PETSC_SUCCESS);
10937: }
10939: /*@
10940: MatGalerkin - Constructs the coarse grid problem matrix via Galerkin projection.
10942: If the interpolation and restriction operators are the same, uses `MatPtAP()`.
10943: If they are not the same, uses `MatMatMatMult()`.
10945: Once the coarse grid problem is constructed, it corrects for interpolation operators
10946: that are not of full rank, which can legitimately happen in the case of non-nested
10947: geometric multigrid.
10949: Input Parameters:
10950: + restrct - restriction operator
10951: . dA - fine grid matrix
10952: . interpolate - interpolation operator
10953: . reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
10954: - fill - expected fill, use `PETSC_DEFAULT` if you do not have a good estimate
10956: Output Parameter:
10957: . A - the Galerkin coarse matrix
10959: Options Database Key:
10960: . -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
10962: Level: developer
10964: .seealso: [](ch_matrices), `Mat`, `MatPtAP()`, `MatMatMatMult()`
10965: @*/
10966: PetscErrorCode MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
10967: {
10968: IS zerorows;
10969: Vec diag;
10971: PetscFunctionBegin;
10972: PetscCheck(reuse != MAT_INPLACE_MATRIX, PetscObjectComm((PetscObject)dA), PETSC_ERR_SUP, "Inplace product not supported");
10973: /* Construct the coarse grid matrix */
10974: if (interpolate == restrct) {
10975: PetscCall(MatPtAP(dA, interpolate, reuse, fill, A));
10976: } else {
10977: PetscCall(MatMatMatMult(restrct, dA, interpolate, reuse, fill, A));
10978: }
10980: /* If the interpolation matrix is not of full rank, A will have zero rows.
10981: This can legitimately happen in the case of non-nested geometric multigrid.
10982: In that event, we set the rows of the matrix to the rows of the identity,
10983: ignoring the equations (as the RHS will also be zero). */
10985: PetscCall(MatFindZeroRows(*A, &zerorows));
10987: if (zerorows != NULL) { /* if there are any zero rows */
10988: PetscCall(MatCreateVecs(*A, &diag, NULL));
10989: PetscCall(MatGetDiagonal(*A, diag));
10990: PetscCall(VecISSet(diag, zerorows, 1.0));
10991: PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
10992: PetscCall(VecDestroy(&diag));
10993: PetscCall(ISDestroy(&zerorows));
10994: }
10995: PetscFunctionReturn(PETSC_SUCCESS);
10996: }
10998: /*@C
10999: MatSetOperation - Allows the user to set a matrix operation for any matrix type
11001: Logically Collective
11003: Input Parameters:
11004: + mat - the matrix
11005: . op - the name of the operation
11006: - f - the function that provides the operation
11008: Level: developer
11010: Example Usage:
11011: .vb
11012: extern PetscErrorCode usermult(Mat, Vec, Vec);
11014: PetscCall(MatCreateXXX(comm, ..., &A));
11015: PetscCall(MatSetOperation(A, MATOP_MULT, (PetscVoidFunction)usermult));
11016: .ve
11018: Notes:
11019: See the file `include/petscmat.h` for a complete list of matrix
11020: operations, which all have the form MATOP_<OPERATION>, where
11021: <OPERATION> is the name (in all capital letters) of the
11022: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11024: All user-provided functions (except for `MATOP_DESTROY`) should have the same calling
11025: sequence as the usual matrix interface routines, since they
11026: are intended to be accessed via the usual matrix interface
11027: routines, e.g.,
11028: .vb
11029: MatMult(Mat, Vec, Vec) -> usermult(Mat, Vec, Vec)
11030: .ve
11032: In particular each function MUST return `PETSC_SUCCESS` on success and
11033: nonzero on failure.
11035: This routine is distinct from `MatShellSetOperation()` in that it can be called on any matrix type.
11037: .seealso: [](ch_matrices), `Mat`, `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
11038: @*/
11039: PetscErrorCode MatSetOperation(Mat mat, MatOperation op, void (*f)(void))
11040: {
11041: PetscFunctionBegin;
11043: if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))(mat->ops->view)) mat->ops->viewnative = mat->ops->view;
11044: (((void (**)(void))mat->ops)[op]) = f;
11045: PetscFunctionReturn(PETSC_SUCCESS);
11046: }
11048: /*@C
11049: MatGetOperation - Gets a matrix operation for any matrix type.
11051: Not Collective
11053: Input Parameters:
11054: + mat - the matrix
11055: - op - the name of the operation
11057: Output Parameter:
11058: . f - the function that provides the operation
11060: Level: developer
11062: Example Usage:
11063: .vb
11064: PetscErrorCode (*usermult)(Mat, Vec, Vec);
11066: MatGetOperation(A, MATOP_MULT, (void (**)(void))&usermult);
11067: .ve
11069: Notes:
11070: See the file include/petscmat.h for a complete list of matrix
11071: operations, which all have the form MATOP_<OPERATION>, where
11072: <OPERATION> is the name (in all capital letters) of the
11073: user interface routine (e.g., `MatMult()` -> `MATOP_MULT`).
11075: This routine is distinct from `MatShellGetOperation()` in that it can be called on any matrix type.
11077: .seealso: [](ch_matrices), `Mat`, `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
11078: @*/
11079: PetscErrorCode MatGetOperation(Mat mat, MatOperation op, void (**f)(void))
11080: {
11081: PetscFunctionBegin;
11083: *f = (((void (**)(void))mat->ops)[op]);
11084: PetscFunctionReturn(PETSC_SUCCESS);
11085: }
11087: /*@
11088: MatHasOperation - Determines whether the given matrix supports the particular operation.
11090: Not Collective
11092: Input Parameters:
11093: + mat - the matrix
11094: - op - the operation, for example, `MATOP_GET_DIAGONAL`
11096: Output Parameter:
11097: . has - either `PETSC_TRUE` or `PETSC_FALSE`
11099: Level: advanced
11101: Note:
11102: See `MatSetOperation()` for additional discussion on naming convention and usage of `op`.
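Example Usage (a minimal sketch, assuming `mat` is any assembled matrix):
.vb
  PetscBool has;

  PetscCall(MatHasOperation(mat, MATOP_MULT_TRANSPOSE, &has));
  if (!has) {
    // fall back to an algorithm that does not need MatMultTranspose()
  }
.ve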
11104: .seealso: [](ch_matrices), `Mat`, `MatCreateShell()`, `MatGetOperation()`, `MatSetOperation()`
11105: @*/
11106: PetscErrorCode MatHasOperation(Mat mat, MatOperation op, PetscBool *has)
11107: {
11108: PetscFunctionBegin;
11110: PetscAssertPointer(has, 3);
11111: if (mat->ops->hasoperation) {
11112: PetscUseTypeMethod(mat, hasoperation, op, has);
11113: } else {
11114: if (((void **)mat->ops)[op]) *has = PETSC_TRUE;
11115: else {
11116: *has = PETSC_FALSE;
11117: if (op == MATOP_CREATE_SUBMATRIX) {
11118: PetscMPIInt size;
11120: PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
11121: if (size == 1) PetscCall(MatHasOperation(mat, MATOP_CREATE_SUBMATRICES, has));
11122: }
11123: }
11124: }
11125: PetscFunctionReturn(PETSC_SUCCESS);
11126: }
11128: /*@
11129: MatHasCongruentLayouts - Determines whether the rows and columns layouts of the matrix are congruent
11131: Collective
11133: Input Parameter:
11134: . mat - the matrix
11136: Output Parameter:
11137: . cong - either `PETSC_TRUE` or `PETSC_FALSE`
11139: Level: beginner
11141: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatSetSizes()`, `PetscLayout`
11142: @*/
11143: PetscErrorCode MatHasCongruentLayouts(Mat mat, PetscBool *cong)
11144: {
11145: PetscFunctionBegin;
11148: PetscAssertPointer(cong, 2);
11149: if (!mat->rmap || !mat->cmap) {
11150: *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
11151: PetscFunctionReturn(PETSC_SUCCESS);
11152: }
11153: if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
11154: PetscCall(PetscLayoutSetUp(mat->rmap));
11155: PetscCall(PetscLayoutSetUp(mat->cmap));
11156: PetscCall(PetscLayoutCompare(mat->rmap, mat->cmap, cong));
11157: if (*cong) mat->congruentlayouts = 1;
11158: else mat->congruentlayouts = 0;
11159: } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
11160: PetscFunctionReturn(PETSC_SUCCESS);
11161: }
11163: PetscErrorCode MatSetInf(Mat A)
11164: {
11165: PetscFunctionBegin;
11166: PetscUseTypeMethod(A, setinf);
11167: PetscFunctionReturn(PETSC_SUCCESS);
11168: }
11170: /*@C
11171: MatCreateGraph - Creates a scalar matrix (that is, a matrix with one vertex for each block vertex in the original matrix) for use in graph algorithms,
11172: and possibly removes small values from the graph structure.
11174: Collective
11176: Input Parameters:
11177: + A - the matrix
11178: . sym - `PETSC_TRUE` indicates that the graph should be symmetrized
11179: . scale - `PETSC_TRUE` indicates that the graph edge weights should be symmetrically scaled with the diagonal entry
11180: - filter - filter value - < 0: does nothing; == 0: removes only 0.0 entries; otherwise: removes entries with abs(entries) <= value
11182: Output Parameter:
11183: . graph - the resulting graph
11185: Level: advanced
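Example Usage (a minimal sketch, assuming `A` is an assembled blocked matrix, for example a `MATBAIJ` matrix):
.vb
  Mat G;

  PetscCall(MatCreateGraph(A, PETSC_TRUE, PETSC_TRUE, 0.0, &G)); // symmetrize, scale, and drop only exact zeros
  // ... use G in a graph algorithm, for example coarsening ...
  PetscCall(MatDestroy(&G));
.ve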
11187: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `PCGAMG`
11188: @*/
11189: PetscErrorCode MatCreateGraph(Mat A, PetscBool sym, PetscBool scale, PetscReal filter, Mat *graph)
11190: {
11191: PetscFunctionBegin;
11195: PetscAssertPointer(graph, 5);
11196: PetscUseTypeMethod(A, creategraph, sym, scale, filter, graph);
11197: PetscFunctionReturn(PETSC_SUCCESS);
11198: }
11200: /*@
11201: MatEliminateZeros - Eliminates the nondiagonal zero entries from the nonzero structure of a sparse `Mat` in place,
11202: meaning the same memory is used for the matrix, and no new memory is allocated.
11204: Collective
11206: Input Parameters:
11207: + A - the matrix
11208: - keep - if for a given row of `A`, the diagonal coefficient is zero, indicates whether it should be left in the structure or eliminated as well
11210: Level: intermediate
11212: Developer Note:
11213: The entries in the sparse matrix data structure are shifted to fill in the unneeded locations in the data. Thus the ends
11214: of the arrays in the data structure are left unused.
11216: .seealso: [](ch_matrices), `Mat`, `MatCreate()`, `MatCreateGraph()`, `MatFilter()`
11217: @*/
11218: PetscErrorCode MatEliminateZeros(Mat A, PetscBool keep)
11219: {
11220: PetscFunctionBegin;
11222: PetscUseTypeMethod(A, eliminatezeros, keep);
11223: PetscFunctionReturn(PETSC_SUCCESS);
11224: }