/*
   mpidense.h - private data structures and internal prototypes for the
   basic parallel (MPI) dense matrix implementation.
   From PETSc 3.12.5 (2020-03-29).
   NOTE(review): no include guard is visible in this chunk -- confirm the
   full header has one (PETSc headers normally do).
*/
#include <../src/mat/impls/dense/seq/dense.h>
/* Data structures for basic parallel dense matrix */
6: typedef struct { /* used by MatMatMult_MPIDense_MPIDense() */
7: Mat Ae,Be,Ce; /* matrix in Elemental format */
8: PetscErrorCode (*destroy)(Mat);
9: } Mat_MatMultDense;
11: typedef struct { /* used by MatTransposeMatMult_MPIDense_MPIDense() */
12: PetscScalar *sendbuf,*atbarray;
13: PetscMPIInt *recvcounts;
14: PetscErrorCode (*destroy)(Mat);
15: PetscMPIInt tag;
16: } Mat_TransMatMultDense;
18: typedef struct { /* used by MatMatTransposeMult_MPIDense_MPIDense() */
19: PetscScalar *buf[2];
20: PetscMPIInt tag;
21: PetscMPIInt *recvcounts;
22: PetscMPIInt *recvdispls;
23: PetscErrorCode (*destroy)(Mat);
24: PetscInt alg; /* algorithm used */
25: } Mat_MatTransMultDense;
27: typedef struct {
28: PetscInt nvec; /* this is the n size for the vector one multiplies with */
29: Mat A; /* local submatrix */
30: PetscMPIInt size; /* size of communicator */
31: PetscMPIInt rank; /* rank of proc in communicator */
33: /* The following variables are used for matrix assembly */
34: PetscBool donotstash; /* Flag indicationg if values should be stashed */
35: MPI_Request *send_waits; /* array of send requests */
36: MPI_Request *recv_waits; /* array of receive requests */
37: PetscInt nsends,nrecvs; /* numbers of sends and receives */
38: PetscScalar *svalues,*rvalues; /* sending and receiving data */
39: PetscInt rmax; /* maximum message length */
41: /* The following variables are used for matrix-vector products */
42: Vec lvec; /* local vector */
43: VecScatter Mvctx; /* scatter context for vector */
44: PetscBool roworiented; /* if true, row oriented input (default) */
46: Mat_MatTransMatMult *atb; /* used by MatTransposeMatMult_MPIAIJ_MPIDense */
47: Mat_TransMatMultDense *atbdense; /* used by MatTransposeMatMult_MPIDense_MPIDense */
48: Mat_MatMultDense *abdense; /* used by MatMatMult_MPIDense_MPIDense */
49: Mat_MatTransMultDense *abtdense; /* used by MatMatTransposeMult_MPIDense_MPIDense */
50: } Mat_MPIDense;
52: PETSC_INTERN PetscErrorCode MatLoad_MPIDense(Mat,PetscViewer);
53: PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIDense(Mat);
54: PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIDense(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
55: PETSC_INTERN PetscErrorCode MatEqual_MPIDense(Mat,Mat,PetscBool*);
56: PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIDense_MPIDense(Mat,Mat,PetscReal,Mat*);
57: PETSC_INTERN PetscErrorCode MatMatMult_MPIAIJ_MPIDense(Mat,Mat,MatReuse,PetscReal,Mat*);
58: PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIDense(Mat,Mat,PetscReal,Mat*);
59: PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIDense(Mat,Mat,Mat);
60: PETSC_INTERN PetscErrorCode MatTransposeMatMult_MPIDense_MPIDense(Mat,Mat,MatReuse,PetscReal,Mat*);
61: PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIDense_MPIDense(Mat,Mat,PetscReal,Mat*);
62: PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIDense_MPIDense(Mat,Mat,Mat);
64: #if defined(PETSC_HAVE_ELEMENTAL)
65: PETSC_INTERN PetscErrorCode MatMatMult_MPIDense_MPIDense(Mat,Mat,MatReuse,PetscReal,Mat*);
66: PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIDense_MPIDense(Mat,Mat,Mat);
67: #endif