Actual source code: lsc.c

petsc-3.8.4 2018-03-24

#include <petsc/private/pcimpl.h>

typedef struct {
  PetscBool allocated;     /* inner KSP and work vectors have been created */
  PetscBool scalediag;     /* scale by the reciprocal diagonal of the (0,0) block (-pc_lsc_scale_diag) */
  KSP       kspL;          /* inner solver for L */
  Vec       scale;         /* reciprocal diagonal of the (0,0) block's preconditioning matrix */
  Vec       x0,y0,x1;      /* work vectors */
  Mat       L;             /* keep a copy to reuse when obtained with L = A10*A01 */
} PC_LSC;

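/* Create the inner KSP (options prefix "<pc prefix>lsc_", default type KSPPREONLY) and the work vectors on first use */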
static PetscErrorCode PCLSCAllocate_Private(PC pc)
{
  PC_LSC         *lsc = (PC_LSC*)pc->data;
  Mat            A;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (lsc->allocated) PetscFunctionReturn(0);
  ierr = KSPCreate(PetscObjectComm((PetscObject)pc),&lsc->kspL);CHKERRQ(ierr);
  ierr = KSPSetErrorIfNotConverged(lsc->kspL,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)lsc->kspL,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = KSPSetType(lsc->kspL,KSPPREONLY);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(lsc->kspL,((PetscObject)pc)->prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(lsc->kspL,"lsc_");CHKERRQ(ierr);
  ierr = MatSchurComplementGetSubMatrices(pc->mat,&A,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&lsc->x0,&lsc->y0);CHKERRQ(ierr);
  ierr = MatCreateVecs(pc->pmat,&lsc->x1,NULL);CHKERRQ(ierr);
  if (lsc->scalediag) {
    ierr = VecDuplicate(lsc->x0,&lsc->scale);CHKERRQ(ierr);
  }
  lsc->allocated = PETSC_TRUE;
  PetscFunctionReturn(0);
}

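/* Obtain L (and Lp): use matrices composed on the operators as "LSC_L"/"LSC_Lp" if present, otherwise form and reuse L = A10*A01; then configure the inner solver */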
static PetscErrorCode PCSetUp_LSC(PC pc)
{
  PC_LSC         *lsc = (PC_LSC*)pc->data;
  Mat            L,Lp,B,C;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCLSCAllocate_Private(pc);CHKERRQ(ierr);
  ierr = PetscObjectQuery((PetscObject)pc->mat,"LSC_L",(PetscObject*)&L);CHKERRQ(ierr);
  if (!L) {ierr = PetscObjectQuery((PetscObject)pc->pmat,"LSC_L",(PetscObject*)&L);CHKERRQ(ierr);}
  ierr = PetscObjectQuery((PetscObject)pc->pmat,"LSC_Lp",(PetscObject*)&Lp);CHKERRQ(ierr);
  if (!Lp) {ierr = PetscObjectQuery((PetscObject)pc->mat,"LSC_Lp",(PetscObject*)&Lp);CHKERRQ(ierr);}
  if (!L) {
    /* no user-provided L: form L = A10*A01 and keep it for reuse */
    ierr = MatSchurComplementGetSubMatrices(pc->mat,NULL,NULL,&B,&C,NULL);CHKERRQ(ierr);
    if (!lsc->L) {
      ierr = MatMatMult(C,B,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&lsc->L);CHKERRQ(ierr);
    } else {
      ierr = MatMatMult(C,B,MAT_REUSE_MATRIX,PETSC_DEFAULT,&lsc->L);CHKERRQ(ierr);
    }
    Lp = L = lsc->L;
  }
  if (lsc->scale) {
    Mat Ap;
    ierr = MatSchurComplementGetSubMatrices(pc->mat,NULL,&Ap,NULL,NULL,NULL);CHKERRQ(ierr);
    ierr = MatGetDiagonal(Ap,lsc->scale);CHKERRQ(ierr); /* Should be the mass matrix, but we don't have plumbing for that yet */
    ierr = VecReciprocal(lsc->scale);CHKERRQ(ierr);
  }
  ierr = KSPSetOperators(lsc->kspL,L,Lp);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(lsc->kspL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

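/* Apply the LSC approximation y = inv(L) A10 A00 A01 inv(L) x; with -pc_lsc_scale_diag the A00 application is scaled on both sides by the reciprocal diagonal of its preconditioning matrix */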
static PetscErrorCode PCApply_LSC(PC pc,Vec x,Vec y)
{
  PC_LSC         *lsc = (PC_LSC*)pc->data;
  Mat            A,B,C;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSchurComplementGetSubMatrices(pc->mat,&A,NULL,&B,&C,NULL);CHKERRQ(ierr);
  ierr = KSPSolve(lsc->kspL,x,lsc->x1);CHKERRQ(ierr); /* x1 = inv(L) x */
  ierr = MatMult(B,lsc->x1,lsc->x0);CHKERRQ(ierr);    /* x0 = A01 x1   */
  if (lsc->scale) {
    ierr = VecPointwiseMult(lsc->x0,lsc->x0,lsc->scale);CHKERRQ(ierr);
  }
  ierr = MatMult(A,lsc->x0,lsc->y0);CHKERRQ(ierr);    /* y0 = A00 x0   */
  if (lsc->scale) {
    ierr = VecPointwiseMult(lsc->y0,lsc->y0,lsc->scale);CHKERRQ(ierr);
  }
  ierr = MatMult(C,lsc->y0,lsc->x1);CHKERRQ(ierr);    /* x1 = A10 y0   */
  ierr = KSPSolve(lsc->kspL,lsc->x1,y);CHKERRQ(ierr); /* y  = inv(L) x1 */
  PetscFunctionReturn(0);
}

static PetscErrorCode PCReset_LSC(PC pc)
{
  PC_LSC         *lsc = (PC_LSC*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecDestroy(&lsc->x0);CHKERRQ(ierr);
  ierr = VecDestroy(&lsc->y0);CHKERRQ(ierr);
  ierr = VecDestroy(&lsc->x1);CHKERRQ(ierr);
  ierr = VecDestroy(&lsc->scale);CHKERRQ(ierr);
  ierr = KSPDestroy(&lsc->kspL);CHKERRQ(ierr);
  ierr = MatDestroy(&lsc->L);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_LSC(PC pc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCReset_LSC(pc);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_LSC(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_LSC         *lsc = (PC_LSC*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscOptionsHead(PetscOptionsObject,"LSC options");CHKERRQ(ierr);
  {
    ierr = PetscOptionsBool("-pc_lsc_scale_diag","Use diagonal of velocity block (A) for scaling","None",lsc->scalediag,&lsc->scalediag,NULL);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCView_LSC(PC pc,PetscViewer viewer)
{
  PC_LSC         *jac = (PC_LSC*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = KSPView(jac->kspL,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*MC
     PCLSC - Preconditioning for Schur complements, based on Least Squares Commutators

   Options Database Key:
.    -pc_lsc_scale_diag - Use the diagonal of A for scaling

   Level: intermediate

   Notes:
   This preconditioner will normally be used with PCFieldSplit to precondition the Schur complement, but
   it can be used for any Schur complement system.  Consider the Schur complement

.vb
   S = A11 - A10 inv(A00) A01
.ve

   PCLSC currently doesn't do anything with A11, so let's assume it is 0.  The idea is that a good approximation to
   inv(S) is given by

.vb
   inv(A10 A01) A10 A00 A01 inv(A10 A01)
.ve

   The product A10 A01 can be computed for you, but you can also provide it yourself (this is
   usually more efficient anyway).  In the case of incompressible flow, A10 A01 is a Laplacian, call it L.  The current
   interface is to hang L and a preconditioning matrix Lp on the preconditioning matrix.

   If you have called KSPSetOperators(ksp,S,Sp), then S should have type MATSCHURCOMPLEMENT and Sp can be any type you
   like (PCLSC doesn't use it directly), but Sp should have matrices composed with it under the names "LSC_L" and "LSC_Lp".
   For example, you might have setup code like this

.vb
   PetscObjectCompose((PetscObject)Sp,"LSC_L",(PetscObject)L);
   PetscObjectCompose((PetscObject)Sp,"LSC_Lp",(PetscObject)Lp);
.ve

   And then your Jacobian assembly would look like

.vb
   PetscObjectQuery((PetscObject)Sp,"LSC_L",(PetscObject*)&L);
   PetscObjectQuery((PetscObject)Sp,"LSC_Lp",(PetscObject*)&Lp);
   if (L) { assemble L }
   if (Lp) { assemble Lp }
.ve

   With this, you should be able to choose LSC preconditioning, using e.g. ML's algebraic multigrid to solve with L

.vb
   -fieldsplit_1_pc_type lsc -fieldsplit_1_lsc_pc_type ml
.ve
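
   In a full PCFieldSplit Schur run, the surrounding options might look like the following sketch (the split index 1,
   the choice of an upper Schur factorization, and the availability of ML are assumptions that depend on your problem
   and configuration):

.vb
   -pc_type fieldsplit -pc_fieldsplit_type schur -pc_fieldsplit_schur_fact_type upper
   -fieldsplit_1_ksp_type gmres -fieldsplit_1_pc_type lsc
   -fieldsplit_1_lsc_ksp_type preonly -fieldsplit_1_lsc_pc_type ml
.ve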

   Since PCLSC does not use the values in Sp, you can still put an assembled matrix there so that ordinary preconditioners can be used with the same operators.
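
   Outside of PCFieldSplit, a minimal sketch of driving PCLSC directly is shown below; the matrices A00, Ap00, A01, A10,
   Sp, L, Lp and the vectors b, x are placeholders you would have created already, and error checking is omitted for
   brevity:

.vb
   Mat S;
   KSP ksp;
   PC  pc;

   MatCreateSchurComplement(A00,Ap00,A01,A10,NULL,&S); /* S has type MATSCHURCOMPLEMENT; A11 assumed 0 */
   PetscObjectCompose((PetscObject)Sp,"LSC_L",(PetscObject)L);
   PetscObjectCompose((PetscObject)Sp,"LSC_Lp",(PetscObject)Lp);
   KSPCreate(PetscObjectComm((PetscObject)S),&ksp);
   KSPSetOperators(ksp,S,Sp);
   KSPGetPC(ksp,&pc);
   PCSetType(pc,PCLSC);
   KSPSetFromOptions(ksp);
   KSPSolve(ksp,b,x);
.ve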

   References:
+  1. - Elman, Howle, Shadid, Shuttleworth, and Tuminaro, Block preconditioners based on approximate commutators, 2006.
-  2. - Silvester, Elman, Kay, Wathen, Efficient preconditioning of the linearized Navier-Stokes equations for incompressible flow, 2001.

   Concepts: physics based preconditioners, block preconditioners

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, Block_Preconditioners, PCFIELDSPLIT,
           PCFieldSplitGetSubKSP(), PCFieldSplitSetFields(), PCFieldSplitSetType(), PCFieldSplitSetIS(), PCFieldSplitSetSchurPre(),
           MatCreateSchurComplement()
M*/

PETSC_EXTERN PetscErrorCode PCCreate_LSC(PC pc)
{
  PC_LSC         *lsc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr     = PetscNewLog(pc,&lsc);CHKERRQ(ierr);
  pc->data = (void*)lsc;

  pc->ops->apply           = PCApply_LSC;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_LSC;
  pc->ops->reset           = PCReset_LSC;
  pc->ops->destroy         = PCDestroy_LSC;
  pc->ops->setfromoptions  = PCSetFromOptions_LSC;
  pc->ops->view            = PCView_LSC;
  pc->ops->applyrichardson = 0;
  PetscFunctionReturn(0);
}