Actual source code: mpibaij.h

#pragma once
#include <../src/mat/impls/baij/seq/baij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <petsc/private/hashmapi.h>

#if defined(PETSC_USE_CTABLE)
  #define PETSCTABLE PetscHMapI
#else
  #define PETSCTABLE PetscInt *
#endif
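
/* Editorial note (a sketch, not part of the original header): PETSCTABLE is the
   storage type of the `colmap` field defined below. With PETSC_USE_CTABLE it is a
   PetscHMapI hash map; otherwise it is a dense PetscInt array indexed by global
   block column. Assuming the illustrative names `baij` (a Mat_MPIBAIJ *) and
   `gcol` (a global block column), a lookup goes roughly like:

     PetscInt lcol;
   #if defined(PETSC_USE_CTABLE)
     PetscCall(PetscHMapIGetWithDefault(baij->colmap, gcol + 1, 0, &lcol));
   #else
     lcol = baij->colmap[gcol];
   #endif
     if (!lcol) {
       // gcol has no entries in the off-diagonal part B
     } else {
       lcol--; // stored values are shifted by one so that 0 can mean "absent"
     }
*/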

#define MPIBAIJHEADER \
  PetscInt   *rangebs;                            /* rmap->range/bs */ \
  PetscInt    rstartbs, rendbs, cstartbs, cendbs; /* map values / bs */ \
  Mat         A, B;                               /* local submatrices: A (diag part), B (off-diag part) */ \
  PetscMPIInt size;                               /* size of communicator */ \
  PetscMPIInt rank;                               /* rank of this process in communicator */ \
  PetscInt    bs2;                                /* block size, bs2 = bs*bs */ \
  PetscInt    Mbs, Nbs;                           /* number of block rows/cols in matrix; M/bs, N/bs */ \
  PetscInt    mbs, nbs;                           /* number of block rows/cols on this process; m/bs, n/bs */ \
\
  /* The following variables are used for matrix assembly */ \
\
  PetscBool    donotstash;              /* if PETSC_TRUE, off-process entries are dropped */ \
  PetscBool    subset_off_proc_entries; /* PETSC_TRUE if assembly will always communicate a subset of the entries communicated the first time */ \
  MPI_Request *send_waits;              /* array of send requests */ \
  MPI_Request *recv_waits;              /* array of receive requests */ \
  PetscInt     nsends, nrecvs;          /* numbers of sends and receives */ \
  MatScalar   *svalues, *rvalues;       /* sending and receiving data */ \
  PetscInt     rmax;                    /* maximum message length */ \
  PETSCTABLE   colmap;                  /* local col number of off-diag col */ \
\
  PetscInt *garray; /* work array */ \
\
  /* The following variable is used by blocked matrix assembly */ \
  MatScalar *barray; /* Block array of size bs2 */ \
\
  /* The following variables are used for matrix-vector products */ \
\
  Vec        lvec;        /* local vector */ \
  VecScatter Mvctx;       /* scatter context for vector */ \
  PetscBool  roworiented; /* if true, row-oriented input; default true */ \
\
  /* The following variables are for MatGetRow() */ \
\
  PetscInt    *rowindices;   /* column indices for row */ \
  PetscScalar *rowvalues;    /* nonzero values in row */ \
  PetscBool    getrowactive; /* indicates a MatGetRow() is active, i.e. the row has not yet been restored */ \
\
  /* Some variables to make MatSetValues and others more efficient */ \
  PetscInt    rstart_bs, rend_bs; \
  PetscInt    cstart_bs, cend_bs; \
  PetscInt   *ht; /* Hash table to speed up matrix assembly */ \
  MatScalar **hd; /* Hash table data */ \
  PetscInt    ht_size; \
  PetscInt    ht_total_ct, ht_insert_ct; /* Hash table statistics */ \
  PetscBool   ht_flag;                   /* Flag to indicate whether hash tables are used */ \
  double      ht_fact;                   /* Factor used to determine the hash table size */ \
\
  PetscInt   setvalueslen;  /* only used for single-precision computations */ \
  MatScalar *setvaluescopy; /* area where double-precision values in MatSetValuesXXX() are copied */ \
                            /* before calling MatSetValuesXXX_MPIBAIJ_MatScalar() */ \
  PetscBool      ijonly;    /* used in MatCreateSubMatrices_MPIBAIJ_local() to get the ij structure only */ \
  struct _MatOps cops

typedef struct {
  MPIBAIJHEADER;
} Mat_MPIBAIJ;
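
/* Editorial note (a sketch, not part of the original header): implementation
   routines obtain this structure by casting the matrix's data pointer, and the
   A, B, lvec and Mvctx fields cooperate in matrix-vector products: the needed
   off-process entries of x are gathered into lvec through Mvctx, after which
   y = A*x_local + B*lvec. With illustrative names (see MatMult_MPIBAIJ in
   mpibaij.c for the actual code), this looks roughly like:

     Mat_MPIBAIJ *a = (Mat_MPIBAIJ *)mat->data;

     PetscCall(VecScatterBegin(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall((*a->A->ops->mult)(a->A, xx, yy));              // yy  = A * x_local
     PetscCall(VecScatterEnd(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall((*a->B->ops->multadd)(a->B, a->lvec, yy, yy));  // yy += B * lvec
*/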

PETSC_INTERN PetscErrorCode MatView_MPIBAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIBAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatView_MPIBAIJ_Binary(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIBAIJ_Binary(Mat, PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateColmap_MPIBAIJ_Private(Mat);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIBAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIBAIJ_local(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *, PetscBool);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIBAIJ_Private(Mat, IS, IS, PetscInt, MatReuse, Mat *, PetscBool);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIBAIJ(Mat, MPI_Comm, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIBAIJ(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIBAIJ_Once(Mat, PetscInt, IS *);
PETSC_INTERN PetscErrorCode MatMPIBAIJSetPreallocation_MPIBAIJ(Mat B, PetscInt bs, PetscInt d_nz, const PetscInt *d_nnz, PetscInt o_nz, const PetscInt *o_nnz);
PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIBAIJ(Mat, const PetscInt *, Mat, const PetscInt *, PetscInt *);

PETSC_INTERN PetscErrorCode MatConjugate_SeqBAIJ(Mat);
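
/* Editorial usage sketch (not part of the original header): the internal routines
   declared above back the public MATMPIBAIJ type. A typical user-level setup that
   ends up in this code path, with made-up sizes, is roughly:

     Mat      A;
     PetscInt bs = 2, m = 10, n = 10; // local sizes, each a multiple of bs

     PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
     PetscCall(MatSetSizes(A, m, n, PETSC_DECIDE, PETSC_DECIDE));
     PetscCall(MatSetType(A, MATMPIBAIJ));
     PetscCall(MatMPIBAIJSetPreallocation(A, bs, 5, NULL, 2, NULL)); // 5 diag / 2 off-diag blocks per block row
     // ... MatSetValuesBlocked(), MatAssemblyBegin/End(), MatMult(), ...
     PetscCall(MatDestroy(&A));
*/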