Actual source code: tfs.c

petsc-3.11.4 2019-09-28
/*
        Provides an interface to the Tufo-Fischer parallel direct solver
*/

#include <petsc/private/pcimpl.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/ksp/pc/impls/tfs/tfs.h>

typedef struct {
  xxt_ADT  xxt;       /* XXT factorization context, used when the matrix is symmetric */
  xyt_ADT  xyt;       /* XYT factorization context, used otherwise */
  Vec      b,xd,xo;   /* work vectors wrapping the arrays passed to the local mat-vec */
  PetscInt nd;        /* number of local columns of the diagonal block */
} PC_TFS;

PetscErrorCode PCDestroy_TFS(PC pc)
{
  PC_TFS         *tfs = (PC_TFS*)pc->data;

  /* free the XXT/XYT data structures and the work vectors */
  if (tfs->xxt) {
    XXT_free(tfs->xxt);
  }
  if (tfs->xyt) {
    XYT_free(tfs->xyt);
  }
  VecDestroy(&tfs->b);
  VecDestroy(&tfs->xd);
  VecDestroy(&tfs->xo);
  PetscFree(pc->data);
  return(0);
}
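
/* The two apply routines below solve with the factorization computed in PCSetUp_TFS:
   XXT for the symmetric case, XYT for the nonsymmetric case */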

static PetscErrorCode PCApply_TFS_XXT(PC pc,Vec x,Vec y)
{
  PC_TFS            *tfs = (PC_TFS*)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  VecGetArrayRead(x,&xx);
  VecGetArray(y,&yy);
  XXT_solve(tfs->xxt,yy,(PetscScalar*)xx);
  VecRestoreArrayRead(x,&xx);
  VecRestoreArray(y,&yy);
  return(0);
}

static PetscErrorCode PCApply_TFS_XYT(PC pc,Vec x,Vec y)
{
  PC_TFS            *tfs = (PC_TFS*)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  VecGetArrayRead(x,&xx);
  VecGetArray(y,&yy);
  XYT_solve(tfs->xyt,yy,(PetscScalar*)xx);
  VecRestoreArrayRead(x,&xx);
  VecRestoreArray(y,&yy);
  return(0);
}
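
/* Local matrix-vector product callback handed to XXT_factor()/XYT_factor():
   computes xout = A_d*x_d + A_o*x_o, where A_d and A_o are the diagonal and
   off-diagonal blocks of the MPIAIJ matrix and xin holds the owned entries
   followed by the ghost entries */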

static PetscErrorCode PCTFSLocalMult_TFS(PC pc,PetscScalar *xin,PetscScalar *xout)
{
  PC_TFS         *tfs = (PC_TFS*)pc->data;
  Mat            A    = pc->pmat;
  Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;

  VecPlaceArray(tfs->b,xout);
  VecPlaceArray(tfs->xd,xin);
  VecPlaceArray(tfs->xo,xin+tfs->nd);
  MatMult(a->A,tfs->xd,tfs->b);
  MatMultAdd(a->B,tfs->xo,tfs->b,tfs->b);
  VecResetArray(tfs->b);
  VecResetArray(tfs->xd);
  VecResetArray(tfs->xo);
  return(0);
}
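
/* Set up the preconditioner: build the (1-based) local-to-global column map,
   create the work vectors, and factor the operator with XXT (symmetric) or
   XYT (nonsymmetric), using PCTFSLocalMult_TFS as the matrix-vector product */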

static PetscErrorCode PCSetUp_TFS(PC pc)
{
  PC_TFS         *tfs = (PC_TFS*)pc->data;
  Mat            A    = pc->pmat;
  Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
  PetscInt       *localtoglobal,ncol,i;
  PetscBool      ismpiaij;

  /*
  PetscBool      issymmetric;
  PetscReal      tol = 0.0;
  */

  if (A->cmap->N != A->rmap->N) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_SIZ,"matrix must be square");
  PetscObjectTypeCompare((PetscObject)pc->pmat,MATMPIAIJ,&ismpiaij);
  if (!ismpiaij) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Currently only supports MPIAIJ matrices");

  /* generate the local to global mapping (1-based, as expected by the XXT/XYT routines) */
  ncol = a->A->cmap->n + a->B->cmap->n;
  PetscMalloc1(ncol,&localtoglobal);
  for (i=0; i<a->A->cmap->n; i++) localtoglobal[i] = A->cmap->rstart + i + 1;
  for (i=0; i<a->B->cmap->n; i++) localtoglobal[i+a->A->cmap->n] = a->garray[i] + 1;

  /* generate the vectors needed for the local solves */
  VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->A->rmap->n,NULL,&tfs->b);
  VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->A->cmap->n,NULL,&tfs->xd);
  VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->B->cmap->n,NULL,&tfs->xo);
  tfs->nd = a->A->cmap->n;

  /*   MatIsSymmetric(A,tol,&issymmetric); */
  /*  if (issymmetric) { */
  PetscBarrier((PetscObject)pc);
  if (A->symmetric) {
    tfs->xxt       = XXT_new();
    XXT_factor(tfs->xxt,localtoglobal,A->rmap->n,ncol,(PetscErrorCode (*)(void*,PetscScalar*,PetscScalar*))PCTFSLocalMult_TFS,pc);
    pc->ops->apply = PCApply_TFS_XXT;
  } else {
    tfs->xyt       = XYT_new();
    XYT_factor(tfs->xyt,localtoglobal,A->rmap->n,ncol,(PetscErrorCode (*)(void*,PetscScalar*,PetscScalar*))PCTFSLocalMult_TFS,pc);
    pc->ops->apply = PCApply_TFS_XYT;
  }

  PetscFree(localtoglobal);
  return(0);
}

/* PCTFS currently has no runtime options and no viewer output, so these are empty */
static PetscErrorCode PCSetFromOptions_TFS(PetscOptionItems *PetscOptionsObject,PC pc)
{
  return(0);
}

static PetscErrorCode PCView_TFS(PC pc,PetscViewer viewer)
{
  return(0);
}

/*MC
     PCTFS - A parallel direct solver intended for problems with very few unknowns (such as the
         coarse grid in multigrid). Performs a Cholesky or LU factorization of a matrix defined by
         its local matrix-vector product.

   Implemented by Henry M. Tufo III and Paul Fischer, originally for Nek5000, and called XXT or XYT

   Level: beginner

   Notes:
    Only implemented for MPIAIJ matrices

    Only works on a solver object that lives on all of PETSC_COMM_WORLD!

    Only works for real numbers (is not built if PetscScalar is complex)

    A minimal usage sketch is given at the end of this listing.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC
M*/
PETSC_EXTERN PetscErrorCode PCCreate_TFS(PC pc)
{
  PC_TFS         *tfs;
  PetscMPIInt    cmp;

  MPI_Comm_compare(PETSC_COMM_WORLD,PetscObjectComm((PetscObject)pc),&cmp);
  if (cmp != MPI_IDENT && cmp != MPI_CONGRUENT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"TFS only works with PETSC_COMM_WORLD objects");
  PetscNewLog(pc,&tfs);

  tfs->xxt = 0;
  tfs->xyt = 0;
  tfs->b   = 0;
  tfs->xd  = 0;
  tfs->xo  = 0;
  tfs->nd  = 0;

  pc->ops->apply               = 0;
  pc->ops->applytranspose      = 0;
  pc->ops->setup               = PCSetUp_TFS;
  pc->ops->destroy             = PCDestroy_TFS;
  pc->ops->setfromoptions      = PCSetFromOptions_TFS;
  pc->ops->view                = PCView_TFS;
  pc->ops->applyrichardson     = 0;
  pc->ops->applysymmetricleft  = 0;
  pc->ops->applysymmetricright = 0;
  pc->data                     = (void*)tfs;
  return(0);
}
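
For context, here is a minimal usage sketch (not part of tfs.c). It assumes a square, assembled MATMPIAIJ matrix A and conforming vectors b and x living on PETSC_COMM_WORLD; SolveWithTFS is a hypothetical helper used only to illustrate selecting PCTFS, which can equally be chosen at run time with -pc_type tfs.

#include <petscksp.h>

/* Assumes A (square MATMPIAIJ), b and x are already created and assembled
   on PETSC_COMM_WORLD; shown here only to illustrate selecting PCTFS */
PetscErrorCode SolveWithTFS(Mat A,Vec b,Vec x)
{
  KSP            ksp;
  PC             pc;
  PetscErrorCode ierr;

  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCSetType(pc,PCTFS);CHKERRQ(ierr);   /* or -pc_type tfs with KSPSetFromOptions() */
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  return(0);
}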