Actual source code: tfs.c

/*
        Provides an interface to the Tufo-Fischer parallel direct solver
*/

#include <petsc/private/pcimpl.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/ksp/pc/impls/tfs/tfs.h>

typedef struct {
  xxt_ADT  xxt;       /* XX^T factorization context (used when the matrix is symmetric) */
  xyt_ADT  xyt;       /* XY^T factorization context (used otherwise) */
  Vec      b,xd,xo;   /* work vectors wrapping the arrays passed to the local mat-vec callback */
  PetscInt nd;        /* number of columns in the local diagonal block */
} PC_TFS;

PetscErrorCode PCDestroy_TFS(PC pc)
{
  PC_TFS         *tfs = (PC_TFS*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* free the XXT datastructures */
  if (tfs->xxt) {
    ierr = XXT_free(tfs->xxt);CHKERRQ(ierr);
  }
  if (tfs->xyt) {
    ierr = XYT_free(tfs->xyt);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&tfs->b);CHKERRQ(ierr);
  ierr = VecDestroy(&tfs->xd);CHKERRQ(ierr);
  ierr = VecDestroy(&tfs->xo);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

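/* Apply the preconditioner: solve with the XX^T factorization built in PCSetUp_TFS (input x, result y) */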
static PetscErrorCode PCApply_TFS_XXT(PC pc,Vec x,Vec y)
{
  PC_TFS            *tfs = (PC_TFS*)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  ierr = XXT_solve(tfs->xxt,yy,(PetscScalar*)xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

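/* Apply the preconditioner in the nonsymmetric case, using the XY^T factorization instead */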
static PetscErrorCode PCApply_TFS_XYT(PC pc,Vec x,Vec y)
{
  PC_TFS            *tfs = (PC_TFS*)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
  ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
  ierr = XYT_solve(tfs->xyt,yy,(PetscScalar*)xx);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
  ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

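/*
   Local matrix-vector product callback handed to XXT_factor()/XYT_factor(): xin holds the values for
   the locally owned columns followed by the off-process (ghost) columns, and xout receives the locally
   owned rows of the product, computed as A_diag*x_d + A_off*x_o.
*/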
static PetscErrorCode PCTFSLocalMult_TFS(PC pc,PetscScalar *xin,PetscScalar *xout)
{
  PC_TFS         *tfs = (PC_TFS*)pc->data;
  Mat            A    = pc->pmat;
  Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecPlaceArray(tfs->b,xout);CHKERRQ(ierr);
  ierr = VecPlaceArray(tfs->xd,xin);CHKERRQ(ierr);
  ierr = VecPlaceArray(tfs->xo,xin+tfs->nd);CHKERRQ(ierr);
  ierr = MatMult(a->A,tfs->xd,tfs->b);CHKERRQ(ierr);
  ierr = MatMultAdd(a->B,tfs->xo,tfs->b,tfs->b);CHKERRQ(ierr);
  ierr = VecResetArray(tfs->b);CHKERRQ(ierr);
  ierr = VecResetArray(tfs->xd);CHKERRQ(ierr);
  ierr = VecResetArray(tfs->xo);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

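/*
   Set up the factorization: build a 1-based local-to-global column map covering the owned and ghost
   columns, create the work vectors used by PCTFSLocalMult_TFS(), and factor the operator with XXT
   (symmetric case) or XYT (general case) using the local matrix-vector product above.
*/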
static PetscErrorCode PCSetUp_TFS(PC pc)
{
  PC_TFS         *tfs = (PC_TFS*)pc->data;
  Mat            A    = pc->pmat;
  Mat_MPIAIJ     *a   = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       *localtoglobal,ncol,i;
  PetscBool      ismpiaij;

  /*
  PetscBool      issymmetric;
  PetscReal      tol = 0.0;
  */

  PetscFunctionBegin;
  if (A->cmap->N != A->rmap->N) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_SIZ,"matrix must be square");
  ierr = PetscObjectTypeCompare((PetscObject)pc->pmat,MATMPIAIJ,&ismpiaij);CHKERRQ(ierr);
  if (!ismpiaij) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Currently only supports MPIAIJ matrices");

  /* generate the local to global mapping (shifted to 1-based numbering for the XXT/XYT routines) */
  ncol = a->A->cmap->n + a->B->cmap->n;
  ierr = PetscMalloc1(ncol,&localtoglobal);CHKERRQ(ierr);
  for (i=0; i<a->A->cmap->n; i++) localtoglobal[i] = A->cmap->rstart + i + 1;
  for (i=0; i<a->B->cmap->n; i++) localtoglobal[i+a->A->cmap->n] = a->garray[i] + 1;

  /* generate the vectors needed for the local solves */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->A->rmap->n,NULL,&tfs->b);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->A->cmap->n,NULL,&tfs->xd);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,a->B->cmap->n,NULL,&tfs->xo);CHKERRQ(ierr);
  tfs->nd = a->A->cmap->n;

  /*   MatIsSymmetric(A,tol,&issymmetric); */
  /*  if (issymmetric) { */
  ierr = PetscBarrier((PetscObject)pc);CHKERRQ(ierr);
  if (A->symmetric) {
    tfs->xxt       = XXT_new();
    ierr           = XXT_factor(tfs->xxt,localtoglobal,A->rmap->n,ncol,(PetscErrorCode (*)(void*,PetscScalar*,PetscScalar*))PCTFSLocalMult_TFS,pc);CHKERRQ(ierr);
    pc->ops->apply = PCApply_TFS_XXT;
  } else {
    tfs->xyt       = XYT_new();
    ierr           = XYT_factor(tfs->xyt,localtoglobal,A->rmap->n,ncol,(PetscErrorCode (*)(void*,PetscScalar*,PetscScalar*))PCTFSLocalMult_TFS,pc);CHKERRQ(ierr);
    pc->ops->apply = PCApply_TFS_XYT;
  }

  ierr = PetscFree(localtoglobal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

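/* PCTFS currently has no run-time options and no type-specific viewer output, so these are empty stubs */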
static PetscErrorCode PCSetFromOptions_TFS(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
static PetscErrorCode PCView_TFS(PC pc,PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

/*MC
     PCTFS - A parallel direct solver intended for problems with very few unknowns (like the
         coarse grid in multigrid). Performs a Cholesky or LU factorization of a matrix defined by
         its local matrix-vector product.

   Implemented by Henry M. Tufo III and Paul Fischer, originally for Nek5000, and called XXT or XYT

   Level: beginner

   Notes:
    Only implemented for MPIAIJ matrices

    Only works on a solver object that lives on all of PETSC_COMM_WORLD!

    Only works for real numbers (is not built if PetscScalar is complex)
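
    Example usage - a minimal sketch of selecting this preconditioner from code (assumes ksp is a KSP
    whose operators have already been set; error checking omitted for brevity):
.vb
      PC pc;
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCTFS);
.ve
    or equivalently use the run-time option -pc_type tfs.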

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC
M*/
PETSC_EXTERN PetscErrorCode PCCreate_TFS(PC pc)
{
  PetscErrorCode ierr;
  PC_TFS         *tfs;
  PetscMPIInt    cmp;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PETSC_COMM_WORLD,PetscObjectComm((PetscObject)pc),&cmp);CHKERRQ(ierr);
  if (cmp != MPI_IDENT && cmp != MPI_CONGRUENT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"TFS only works with PETSC_COMM_WORLD objects");
  ierr = PetscNewLog(pc,&tfs);CHKERRQ(ierr);

  tfs->xxt = NULL;
  tfs->xyt = NULL;
  tfs->b   = NULL;
  tfs->xd  = NULL;
  tfs->xo  = NULL;
  tfs->nd  = 0;

  pc->ops->apply               = NULL;
  pc->ops->applytranspose      = NULL;
  pc->ops->setup               = PCSetUp_TFS;
  pc->ops->destroy             = PCDestroy_TFS;
  pc->ops->setfromoptions      = PCSetFromOptions_TFS;
  pc->ops->view                = PCView_TFS;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->data                     = (void*)tfs;
  PetscFunctionReturn(0);
}