Actual source code: precon.c


  2: /*
  3:     The PC (preconditioner) interface routines, callable by users.
  4: */
  5: #include <petsc/private/pcimpl.h>
  6: #include <petscdm.h>

  8: /* Logging support */
  9: PetscClassId  PC_CLASSID;
 10: PetscLogEvent PC_SetUp, PC_SetUpOnBlocks, PC_Apply, PC_MatApply, PC_ApplyCoarse, PC_ApplyMultiple, PC_ApplySymmetricLeft;
 11: PetscLogEvent PC_ApplySymmetricRight, PC_ModifySubMatrices, PC_ApplyOnBlocks, PC_ApplyTransposeOnBlocks;
 12: PetscInt      PetscMGLevelId;

 14: PetscErrorCode PCGetDefaultType_Private(PC pc,const char *type[])
 15: {
 17:   PetscMPIInt    size;
 18:   PetscBool      hasop,flg1,flg2,set,flg3;

 21:   MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);
 22:   if (pc->pmat) {
 23:     MatHasOperation(pc->pmat,MATOP_GET_DIAGONAL_BLOCK,&hasop);
 24:     if (size == 1) {
 25:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ICC,&flg1);
 26:       MatGetFactorAvailable(pc->pmat,"petsc",MAT_FACTOR_ILU,&flg2);
 27:       MatIsSymmetricKnown(pc->pmat,&set,&flg3);
 28:       if (flg1 && (!flg2 || (set && flg3))) {
 29:         *type = PCICC;
 30:       } else if (flg2) {
 31:         *type = PCILU;
 32:       } else if (hasop) { /* likely is a parallel matrix run on one processor */
 33:         *type = PCBJACOBI;
 34:       } else {
 35:         *type = PCNONE;
 36:       }
 37:     } else {
 38:        if (hasop) {
 39:         *type = PCBJACOBI;
 40:       } else {
 41:         *type = PCNONE;
 42:       }
 43:     }
 44:   } else {
 45:     if (size == 1) {
 46:       *type = PCILU;
 47:     } else {
 48:       *type = PCBJACOBI;
 49:     }
 50:   }
 51:   return(0);
 52: }
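/*
   Illustrative sketch (not part of precon.c): the default chosen above is used only when no
   type has been set before PCSetUp(). A caller can always override it, either in code or from
   the options database. Error checking is omitted for brevity; "pc" is assumed to exist.

     PCSetType(pc,PCJACOBI);            either set the type explicitly ...
     PCSetFromOptions(pc);              ... or let -pc_type <jacobi,ilu,bjacobi,...> decide

   If neither is done, PCSetUp() calls PCGetDefaultType_Private() and ends up with PCILU/PCICC
   in serial or PCBJACOBI in parallel, as coded above.
*/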

 54: /*@
 55:    PCReset - Resets a PC context to the pcsetupcalled = 0 state and removes any allocated Vecs and Mats

 57:    Collective on PC

 59:    Input Parameter:
 60: .  pc - the preconditioner context

 62:    Level: developer

 64:    Notes:
 65:     This allows a PC to be reused for a linear system of a different size while keeping the options that were previously set in the PC

 67: .seealso: PCCreate(), PCSetUp()
 68: @*/
 69: PetscErrorCode  PCReset(PC pc)
 70: {

 75:   if (pc->ops->reset) {
 76:     (*pc->ops->reset)(pc);
 77:   }
 78:   VecDestroy(&pc->diagonalscaleright);
 79:   VecDestroy(&pc->diagonalscaleleft);
 80:   MatDestroy(&pc->pmat);
 81:   MatDestroy(&pc->mat);

 83:   pc->setupcalled = 0;
 84:   return(0);
 85: }

 87: /*@C
 88:    PCDestroy - Destroys PC context that was created with PCCreate().

 90:    Collective on PC

 92:    Input Parameter:
 93: .  pc - the preconditioner context

 95:    Level: developer

 97: .seealso: PCCreate(), PCSetUp()
 98: @*/
 99: PetscErrorCode  PCDestroy(PC *pc)
100: {

104:   if (!*pc) return(0);
106:   if (--((PetscObject)(*pc))->refct > 0) {*pc = NULL; return(0);}

108:   PCReset(*pc);

110:   /* if memory was published with SAWs then destroy it */
111:   PetscObjectSAWsViewOff((PetscObject)*pc);
112:   if ((*pc)->ops->destroy) {(*(*pc)->ops->destroy)((*pc));}
113:   DMDestroy(&(*pc)->dm);
114:   PetscHeaderDestroy(pc);
115:   return(0);
116: }

118: /*@C
119:    PCGetDiagonalScale - Indicates if the preconditioner applies an additional left and right
120:       scaling as needed by certain time-stepping codes.

122:    Logically Collective on PC

124:    Input Parameter:
125: .  pc - the preconditioner context

127:    Output Parameter:
128: .  flag - PETSC_TRUE if it applies the scaling

130:    Level: developer

132:    Notes:
133:     If this returns PETSC_TRUE then the system solved via the Krylov method is
134: $           D M A D^{-1} y = D M b  for left preconditioning or
135: $           D A M D^{-1} z = D b for right preconditioning

137: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCSetDiagonalScale()
138: @*/
139: PetscErrorCode  PCGetDiagonalScale(PC pc,PetscBool  *flag)
140: {
144:   *flag = pc->diagonalscale;
145:   return(0);
146: }

148: /*@
149:    PCSetDiagonalScale - Sets the vector used to apply an additional left and right
150:       scaling as needed by certain time-stepping codes.

152:    Logically Collective on PC

154:    Input Parameters:
155: +  pc - the preconditioner context
156: -  s - scaling vector

158:    Level: intermediate

160:    Notes:
161:     The system solved via the Krylov method is
162: $           D M A D^{-1} y = D M b  for left preconditioning or
163: $           D A M D^{-1} z = D b for right preconditioning

165:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

167: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleRight(), PCGetDiagonalScale()
168: @*/
169: PetscErrorCode  PCSetDiagonalScale(PC pc,Vec s)
170: {

176:   pc->diagonalscale     = PETSC_TRUE;

178:   PetscObjectReference((PetscObject)s);
179:   VecDestroy(&pc->diagonalscaleleft);

181:   pc->diagonalscaleleft = s;

183:   VecDuplicate(s,&pc->diagonalscaleright);
184:   VecCopy(s,pc->diagonalscaleright);
185:   VecReciprocal(pc->diagonalscaleright);
186:   return(0);
187: }
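/*
   Illustrative sketch (not part of precon.c): with D = diag(s), PCSetDiagonalScale() stores s
   for the left scaling and 1./s for the right scaling, so subsequent calls behave as

     PCSetDiagonalScale(pc,s);          s is a user-provided Vec
     PCDiagonalScaleLeft(pc,x,y);       y = D x        (pointwise multiply by s)
     PCDiagonalScaleRight(pc,x,y);      y = D^{-1} x   (pointwise multiply by 1./s)

   The Krylov solver then effectively works with D M A D^{-1} (left) or D A M D^{-1} (right),
   as stated in the manual page above. Error checking is omitted for brevity.
*/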

189: /*@
190:    PCDiagonalScaleLeft - Scales a vector by the left scaling as needed by certain time-stepping codes.

192:    Logically Collective on PC

194:    Input Parameters:
195: +  pc - the preconditioner context
196: .  in - input vector
197: -  out - scaled vector (may be the same as in)

199:    Level: intermediate

201:    Notes:
202:     The system solved via the Krylov method is
203: $           D M A D^{-1} y = D M b  for left preconditioning or
204: $           D A M D^{-1} z = D b for right preconditioning

206:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

208:    If diagonal scaling is turned off and in is not out, then in is copied to out.

210: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleSet(), PCDiagonalScaleRight(), PCDiagonalScale()
211: @*/
212: PetscErrorCode  PCDiagonalScaleLeft(PC pc,Vec in,Vec out)
213: {

220:   if (pc->diagonalscale) {
221:     VecPointwiseMult(out,pc->diagonalscaleleft,in);
222:   } else if (in != out) {
223:     VecCopy(in,out);
224:   }
225:   return(0);
226: }

228: /*@
229:    PCDiagonalScaleRight - Scales a vector by the right scaling as needed by certain time-stepping codes.

231:    Logically Collective on PC

233:    Input Parameters:
234: +  pc - the preconditioner context
235: .  in - input vector
236: -  out - scaled vector (may be the same as in)

238:    Level: intermediate

240:    Notes:
241:     The system solved via the Krylov method is
242: $           D M A D^{-1} y = D M b  for left preconditioning or
243: $           D A M D^{-1} z = D b for right preconditioning

245:    PCDiagonalScaleLeft() scales a vector by D. PCDiagonalScaleRight() scales a vector by D^{-1}.

247:    If diagonal scaling is turned off and in is not out, then in is copied to out.

249: .seealso: PCCreate(), PCSetUp(), PCDiagonalScaleLeft(), PCDiagonalScaleSet(), PCDiagonalScale()
250: @*/
251: PetscErrorCode  PCDiagonalScaleRight(PC pc,Vec in,Vec out)
252: {

259:   if (pc->diagonalscale) {
260:     VecPointwiseMult(out,pc->diagonalscaleright,in);
261:   } else if (in != out) {
262:     VecCopy(in,out);
263:   }
264:   return(0);
265: }

267: /*@
268:    PCSetUseAmat - Sets a flag to indicate that when the preconditioner needs to apply (part of) the
269:    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
270:    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.

272:    Logically Collective on PC

274:    Input Parameters:
275: +  pc - the preconditioner context
276: -  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

278:    Options Database Key:
279: .  -pc_use_amat <true,false>

281:    Notes:
282:    For the common case in which the linear system matrix and the matrix used to construct the
283:    preconditioner are identical, this routine does nothing.

285:    Level: intermediate

287: .seealso: PCGetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
288: @*/
289: PetscErrorCode  PCSetUseAmat(PC pc,PetscBool flg)
290: {
293:   pc->useAmat = flg;
294:   return(0);
295: }
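/*
   Illustrative sketch (not part of precon.c): PCSetUseAmat() matters only when Amat != Pmat,
   e.g. a matrix-free operator preconditioned by an assembled approximation. A hypothetical
   setup might look like the following (error checking omitted; "Ashell" and "Passembled"
   are placeholder names).

     KSPSetOperators(ksp,Ashell,Passembled);    Ashell applies A, Passembled builds the PC
     KSPGetPC(ksp,&pc);
     PCSetUseAmat(pc,PETSC_TRUE);               inner applications of the operator use Ashell

   The same effect is available from the command line with -pc_use_amat.
*/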

297: /*@
298:    PCSetErrorIfFailure - Causes PC to generate an error if an FPE, for example a zero pivot, is detected.

300:    Logically Collective on PC

302:    Input Parameters:
303: +  pc - iterative context obtained from PCCreate()
304: -  flg - PETSC_TRUE indicates you want the error generated

306:    Level: advanced

308:    Notes:
309:     Normally PETSc continues if a linear solver fails due to a failed setup of a preconditioner; you can call KSPGetConvergedReason() after a KSPSolve()
310:     to determine if it has converged or failed. Or use -ksp_error_if_not_converged to cause the program to terminate as soon as lack of convergence is
311:     detected.

313:     This is propagated into KSPs used by this PC, which then propagate it into PCs used by those KSPs

315: .seealso: PCGetInitialGuessNonzero(), PCSetInitialGuessKnoll(), PCGetInitialGuessKnoll()
316: @*/
317: PetscErrorCode  PCSetErrorIfFailure(PC pc,PetscBool flg)
318: {
322:   pc->erroriffailure = flg;
323:   return(0);
324: }

326: /*@
327:    PCGetUseAmat - Gets a flag to indicate that when the preconditioner needs to apply (part of) the
328:    operator during the preconditioning process it applies the Amat provided to TSSetRHSJacobian(),
329:    TSSetIJacobian(), SNESSetJacobian(), KSPSetOperators() or PCSetOperators(), not the Pmat.

331:    Logically Collective on PC

333:    Input Parameter:
334: .  pc - the preconditioner context

336:    Output Parameter:
337: .  flg - PETSC_TRUE to use the Amat, PETSC_FALSE to use the Pmat (default is false)

339:    Notes:
340:    For the common case in which the linear system matrix and the matrix used to construct the
341:    preconditioner are identical, this routine does nothing.

343:    Level: intermediate

345: .seealso: PCSetUseAmat(), PCBJACOBI, PCMG, PCFIELDSPLIT, PCCOMPOSITE
346: @*/
347: PetscErrorCode  PCGetUseAmat(PC pc,PetscBool *flg)
348: {
351:   *flg = pc->useAmat;
352:   return(0);
353: }

355: /*@
356:    PCCreate - Creates a preconditioner context.

358:    Collective

360:    Input Parameter:
361: .  comm - MPI communicator

363:    Output Parameter:
364: .  pc - location to put the preconditioner context

366:    Notes:
367:    The default preconditioner for sparse matrices is PCILU or PCICC with 0 fill on one process and block Jacobi with PCILU or PCICC
368:    in parallel. For dense matrices it is always PCNONE.

370:    Level: developer

372: .seealso: PCSetUp(), PCApply(), PCDestroy()
373: @*/
374: PetscErrorCode  PCCreate(MPI_Comm comm,PC *newpc)
375: {
376:   PC             pc;

381:   *newpc = NULL;
382:   PCInitializePackage();

384:   PetscHeaderCreate(pc,PC_CLASSID,"PC","Preconditioner","PC",comm,PCDestroy,PCView);

386:   pc->mat                  = NULL;
387:   pc->pmat                 = NULL;
388:   pc->setupcalled          = 0;
389:   pc->setfromoptionscalled = 0;
390:   pc->data                 = NULL;
391:   pc->diagonalscale        = PETSC_FALSE;
392:   pc->diagonalscaleleft    = NULL;
393:   pc->diagonalscaleright   = NULL;

395:   pc->modifysubmatrices  = NULL;
396:   pc->modifysubmatricesP = NULL;

398:   *newpc = pc;
399:   return(0);

401: }
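/*
   Illustrative sketch (not part of precon.c): a typical stand-alone life cycle of a PC created
   with PCCreate(), assuming a previously assembled Mat A and conforming Vecs x and y
   (error checking omitted).

     PC pc;
     PCCreate(PETSC_COMM_WORLD,&pc);
     PCSetOperators(pc,A,A);            Amat and Pmat are the same here
     PCSetFromOptions(pc);              honor -pc_type and related options
     PCSetUp(pc);
     PCApply(pc,x,y);                   y = B x, where B approximates A^{-1}
     PCDestroy(&pc);

   In most applications the PC is instead managed by a KSP and obtained with KSPGetPC().
*/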

403: /* -------------------------------------------------------------------------------*/

405: /*@
406:    PCApply - Applies the preconditioner to a vector.

408:    Collective on PC

410:    Input Parameters:
411: +  pc - the preconditioner context
412: -  x - input vector

414:    Output Parameter:
415: .  y - output vector

417:    Level: developer

419: .seealso: PCApplyTranspose(), PCApplyBAorAB()
420: @*/
421: PetscErrorCode  PCApply(PC pc,Vec x,Vec y)
422: {
424:   PetscInt       m,n,mv,nv;

430:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
431:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
432:   /* use pmat to check vector sizes since for KSPLSQR the pmat may be of a different size than mat */
433:   MatGetLocalSize(pc->pmat,&m,&n);
434:   VecGetLocalSize(x,&mv);
435:   VecGetLocalSize(y,&nv);
436:   /* check pmat * y = x is feasible */
437:   if (mv != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local rows %D does not equal input vector size %D",m,mv);
438:   if (nv != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Preconditioner number of local columns %D does not equal output vector size %D",n,nv);
439:   VecSetErrorIfLocked(y,3);

441:   PCSetUp(pc);
442:   if (!pc->ops->apply) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply");
443:   VecLockReadPush(x);
444:   PetscLogEventBegin(PC_Apply,pc,x,y,0);
445:   (*pc->ops->apply)(pc,x,y);
446:   PetscLogEventEnd(PC_Apply,pc,x,y,0);
447:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
448:   VecLockReadPop(x);
449:   return(0);
450: }

452: /*@
453:    PCMatApply - Applies the preconditioner to multiple vectors stored as a MATDENSE. Like PCApply(), Y and X must be different matrices.

455:    Collective on PC

457:    Input Parameters:
458: +  pc - the preconditioner context
459: -  X - block of input vectors

461:    Output Parameter:
462: .  Y - block of output vectors

464:    Level: developer

466: .seealso: PCApply(), KSPMatSolve()
467: @*/
468: PetscErrorCode  PCMatApply(PC pc,Mat X,Mat Y)
469: {
470:   Mat            A;
471:   Vec            cy, cx;
472:   PetscInt       m1, M1, m2, M2, n1, N1, n2, N2, m3, M3, n3, N3;
473:   PetscBool      match;

482:   if (Y == X) SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_IDN, "Y and X must be different matrices");
483:   PCGetOperators(pc, NULL, &A);
484:   MatGetLocalSize(A, &m3, &n3);
485:   MatGetLocalSize(X, &m2, &n2);
486:   MatGetLocalSize(Y, &m1, &n1);
487:   MatGetSize(A, &M3, &N3);
488:   MatGetSize(X, &M2, &N2);
489:   MatGetSize(Y, &M1, &N1);
490:   if (n1 != n2 || N1 != N2) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible number of columns between block of input vectors (n,N) = (%D,%D) and block of output vectors (n,N) = (%D,%D)", n2, N2, n1, N1);
491:   if (m2 != m3 || M2 != M3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of input vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m2, M2, m3, M3, n3, N3);
492:   if (m1 != n3 || M1 != N3) SETERRQ6(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Incompatible layout between block of output vectors (m,M) = (%D,%D) and Pmat (m,M)x(n,N) = (%D,%D)x(%D,%D)", m1, M1, m3, M3, n3, N3);
493:   PetscObjectBaseTypeCompareAny((PetscObject)Y, &match, MATSEQDENSE, MATMPIDENSE, "");
494:   if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of output vectors not stored in a dense Mat");
495:   PetscObjectBaseTypeCompareAny((PetscObject)X, &match, MATSEQDENSE, MATMPIDENSE, "");
496:   if (!match) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Provided block of input vectors not stored in a dense Mat");
497:   PCSetUp(pc);
498:   if (pc->ops->matapply) {
499:     PetscLogEventBegin(PC_MatApply, pc, X, Y, 0);
500:     (*pc->ops->matapply)(pc, X, Y);
501:     PetscLogEventEnd(PC_MatApply, pc, X, Y, 0);
502:   } else {
503:     PetscInfo1(pc, "PC type %s applying column by column\n", ((PetscObject)pc)->type_name);
504:     for (n1 = 0; n1 < N1; ++n1) {
505:       MatDenseGetColumnVecRead(X, n1, &cx);
506:       MatDenseGetColumnVecWrite(Y, n1, &cy);
507:       PCApply(pc, cx, cy);
508:       MatDenseRestoreColumnVecWrite(Y, n1, &cy);
509:       MatDenseRestoreColumnVecRead(X, n1, &cx);
510:     }
511:   }
512:   return(0);
513: }
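/*
   Illustrative sketch (not part of precon.c): applying the preconditioner to several right-hand
   sides at once, stored as columns of dense matrices. The sizes follow the checks above:
   X shares the row layout of Pmat, Y its column layout, and both have the same number of
   columns. Error checking omitted; "pc" is assumed to be set up already.

     Mat      pmat,X,Y;
     PetscInt m,n,k = 4;                                  k right-hand sides
     PCGetOperators(pc,NULL,&pmat);
     MatGetLocalSize(pmat,&m,&n);
     MatCreateDense(PetscObjectComm((PetscObject)pc),m,PETSC_DECIDE,PETSC_DETERMINE,k,NULL,&X);
     MatCreateDense(PetscObjectComm((PetscObject)pc),n,PETSC_DECIDE,PETSC_DETERMINE,k,NULL,&Y);
     ... fill the columns of X ...
     PCMatApply(pc,X,Y);                                  falls back to column-by-column PCApply()
     MatDestroy(&X);
     MatDestroy(&Y);
*/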

515: /*@
516:    PCApplySymmetricLeft - Applies the left part of a symmetric preconditioner to a vector.

518:    Collective on PC

520:    Input Parameters:
521: +  pc - the preconditioner context
522: -  x - input vector

524:    Output Parameter:
525: .  y - output vector

527:    Notes:
528:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

530:    Level: developer

532: .seealso: PCApply(), PCApplySymmetricRight()
533: @*/
534: PetscErrorCode  PCApplySymmetricLeft(PC pc,Vec x,Vec y)
535: {

542:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
543:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
544:   PCSetUp(pc);
545:   if (!pc->ops->applysymmetricleft) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have left symmetric apply");
546:   VecLockReadPush(x);
547:   PetscLogEventBegin(PC_ApplySymmetricLeft,pc,x,y,0);
548:   (*pc->ops->applysymmetricleft)(pc,x,y);
549:   PetscLogEventEnd(PC_ApplySymmetricLeft,pc,x,y,0);
550:   VecLockReadPop(x);
551:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
552:   return(0);
553: }

555: /*@
556:    PCApplySymmetricRight - Applies the right part of a symmetric preconditioner to a vector.

558:    Collective on PC

560:    Input Parameters:
561: +  pc - the preconditioner context
562: -  x - input vector

564:    Output Parameter:
565: .  y - output vector

567:    Level: developer

569:    Notes:
570:    Currently, this routine is implemented only for PCICC and PCJACOBI preconditioners.

572: .seealso: PCApply(), PCApplySymmetricLeft()
573: @*/
574: PetscErrorCode  PCApplySymmetricRight(PC pc,Vec x,Vec y)
575: {

582:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
583:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
584:   PCSetUp(pc);
585:   if (!pc->ops->applysymmetricright) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have right symmetric apply");
586:   VecLockReadPush(x);
587:   PetscLogEventBegin(PC_ApplySymmetricRight,pc,x,y,0);
588:   (*pc->ops->applysymmetricright)(pc,x,y);
589:   PetscLogEventEnd(PC_ApplySymmetricRight,pc,x,y,0);
590:   VecLockReadPop(x);
591:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
592:   return(0);
593: }

595: /*@
596:    PCApplyTranspose - Applies the transpose of preconditioner to a vector.

598:    Collective on PC

600:    Input Parameters:
601: +  pc - the preconditioner context
602: -  x - input vector

604:    Output Parameter:
605: .  y - output vector

607:    Notes:
608:     For complex numbers this applies the non-Hermitian transpose.

610:    Developer Notes:
611:     We need to implement a PCApplyHermitianTranspose()

613:    Level: developer

615: .seealso: PCApply(), PCApplyBAorAB(), PCApplyBAorABTranspose(), PCApplyTransposeExists()
616: @*/
617: PetscErrorCode  PCApplyTranspose(PC pc,Vec x,Vec y)
618: {

625:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
626:   if (pc->erroriffailure) {VecValidValues(x,2,PETSC_TRUE);}
627:   PCSetUp(pc);
628:   if (!pc->ops->applytranspose) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply transpose");
629:   VecLockReadPush(x);
630:   PetscLogEventBegin(PC_Apply,pc,x,y,0);
631:   (*pc->ops->applytranspose)(pc,x,y);
632:   PetscLogEventEnd(PC_Apply,pc,x,y,0);
633:   VecLockReadPop(x);
634:   if (pc->erroriffailure) {VecValidValues(y,3,PETSC_FALSE);}
635:   return(0);
636: }

638: /*@
639:    PCApplyTransposeExists - Test whether the preconditioner has a transpose apply operation

641:    Collective on PC

643:    Input Parameters:
644: .  pc - the preconditioner context

646:    Output Parameter:
647: .  flg - PETSC_TRUE if a transpose operation is defined

649:    Level: developer

651: .seealso: PCApplyTranspose()
652: @*/
653: PetscErrorCode  PCApplyTransposeExists(PC pc,PetscBool  *flg)
654: {
658:   if (pc->ops->applytranspose) *flg = PETSC_TRUE;
659:   else *flg = PETSC_FALSE;
660:   return(0);
661: }

663: /*@
664:    PCApplyBAorAB - Applies the preconditioner and operator to a vector. y = B*A*x or y = A*B*x.

666:    Collective on PC

668:    Input Parameters:
669: +  pc - the preconditioner context
670: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
671: .  x - input vector
672: -  work - work vector

674:    Output Parameter:
675: .  y - output vector

677:    Level: developer

679:    Notes:
680:     If the PC has had PCSetDiagonalScale() set then D M A D^{-1} is actually applied for left preconditioning, or D A M D^{-1} for right preconditioning. Note that the
681:    specific KSPSolve() method must also be written to handle the post-solve "correction" for the diagonal scaling.

683: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorABTranspose()
684: @*/
685: PetscErrorCode  PCApplyBAorAB(PC pc,PCSide side,Vec x,Vec y,Vec work)
686: {

698:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
699:   if (side != PC_LEFT && side != PC_SYMMETRIC && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right, left, or symmetric");
700:   if (pc->diagonalscale && side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot include diagonal scaling with symmetric preconditioner application");
701:   if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}

703:   PCSetUp(pc);
704:   if (pc->diagonalscale) {
705:     if (pc->ops->applyBA) {
706:       Vec work2; /* this is expensive, but to fix requires a second work vector argument to PCApplyBAorAB() */
707:       VecDuplicate(x,&work2);
708:       PCDiagonalScaleRight(pc,x,work2);
709:       (*pc->ops->applyBA)(pc,side,work2,y,work);
710:       PCDiagonalScaleLeft(pc,y,y);
711:       VecDestroy(&work2);
712:     } else if (side == PC_RIGHT) {
713:       PCDiagonalScaleRight(pc,x,y);
714:       PCApply(pc,y,work);
715:       MatMult(pc->mat,work,y);
716:       PCDiagonalScaleLeft(pc,y,y);
717:     } else if (side == PC_LEFT) {
718:       PCDiagonalScaleRight(pc,x,y);
719:       MatMult(pc->mat,y,work);
720:       PCApply(pc,work,y);
721:       PCDiagonalScaleLeft(pc,y,y);
722:     } else if (side == PC_SYMMETRIC) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot provide diagonal scaling with symmetric application of preconditioner");
723:   } else {
724:     if (pc->ops->applyBA) {
725:       (*pc->ops->applyBA)(pc,side,x,y,work);
726:     } else if (side == PC_RIGHT) {
727:       PCApply(pc,x,work);
728:       MatMult(pc->mat,work,y);
729:     } else if (side == PC_LEFT) {
730:       MatMult(pc->mat,x,work);
731:       PCApply(pc,work,y);
732:     } else if (side == PC_SYMMETRIC) {
733:       /* There's an extra copy here; maybe should provide 2 work vectors instead? */
734:       PCApplySymmetricRight(pc,x,work);
735:       MatMult(pc->mat,work,y);
736:       VecCopy(y,work);
737:       PCApplySymmetricLeft(pc,work,y);
738:     }
739:   }
740:   if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
741:   return(0);
742: }
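/*
   Illustrative summary (not part of precon.c) of what the branches above compute, with
   B = the preconditioner, A = pc->mat and w = the work vector:

     PC_RIGHT:      w = B x;   y = A w;             i.e. y = A B x
     PC_LEFT:       w = A x;   y = B w;             i.e. y = B A x
     PC_SYMMETRIC:  w = B_R x; y = A w; y = B_L y   using PCApplySymmetricRight/Left

   With diagonal scaling enabled the input is first scaled by D^{-1} and the result by D,
   giving D B A D^{-1} x or D A B D^{-1} x as described in the manual page.
*/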

744: /*@
745:    PCApplyBAorABTranspose - Applies the transpose of the preconditioner
746:    and operator to a vector. That is, applies tr(B) * tr(A) with left preconditioning,
747:    NOT tr(B*A) = tr(A)*tr(B).

749:    Collective on PC

751:    Input Parameters:
752: +  pc - the preconditioner context
753: .  side - indicates the preconditioner side, one of PC_LEFT, PC_RIGHT, or PC_SYMMETRIC
754: .  x - input vector
755: -  work - work vector

757:    Output Parameter:
758: .  y - output vector

760:    Notes:
761:     This routine is used internally so that the same Krylov code can be used to solve A x = b and A' x = b, with a preconditioner
762:       defined by B'. This is why it has the funny form of computing tr(B) * tr(A).

764:     Level: developer

766: .seealso: PCApply(), PCApplyTranspose(), PCApplyBAorAB()
767: @*/
768: PetscErrorCode  PCApplyBAorABTranspose(PC pc,PCSide side,Vec x,Vec y,Vec work)
769: {

777:   if (x == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"x and y must be different vectors");
778:   if (pc->erroriffailure) {VecValidValues(x,3,PETSC_TRUE);}
779:   if (pc->ops->applyBAtranspose) {
780:     (*pc->ops->applyBAtranspose)(pc,side,x,y,work);
781:     if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
782:     return(0);
783:   }
784:   if (side != PC_LEFT && side != PC_RIGHT) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Side must be right or left");

786:   PCSetUp(pc);
787:   if (side == PC_RIGHT) {
788:     PCApplyTranspose(pc,x,work);
789:     MatMultTranspose(pc->mat,work,y);
790:   } else if (side == PC_LEFT) {
791:     MatMultTranspose(pc->mat,x,work);
792:     PCApplyTranspose(pc,work,y);
793:   }
794:   /* add support for PC_SYMMETRIC */
795:   if (pc->erroriffailure) {VecValidValues(y,4,PETSC_FALSE);}
796:   return(0);
797: }

799: /* -------------------------------------------------------------------------------*/

801: /*@
802:    PCApplyRichardsonExists - Determines whether a particular preconditioner has a
803:    built-in fast application of Richardson's method.

805:    Not Collective

807:    Input Parameter:
808: .  pc - the preconditioner

810:    Output Parameter:
811: .  exists - PETSC_TRUE or PETSC_FALSE

813:    Level: developer

815: .seealso: PCApplyRichardson()
816: @*/
817: PetscErrorCode  PCApplyRichardsonExists(PC pc,PetscBool  *exists)
818: {
822:   if (pc->ops->applyrichardson) *exists = PETSC_TRUE;
823:   else *exists = PETSC_FALSE;
824:   return(0);
825: }

827: /*@
828:    PCApplyRichardson - Applies several steps of Richardson iteration with
829:    the particular preconditioner. This routine is usually used by the
830:    Krylov solvers and not the application code directly.

832:    Collective on PC

834:    Input Parameters:
835: +  pc  - the preconditioner context
836: .  b   - the right hand side
837: .  w   - one work vector
838: .  rtol - relative decrease in residual norm convergence criteria
839: .  abstol - absolute residual norm convergence criteria
840: .  dtol - divergence residual norm increase criteria
841: .  its - the number of iterations to apply.
842: -  guesszero - PETSC_TRUE if the initial guess in y is zero, PETSC_FALSE if y contains a nonzero initial guess

844:    Output Parameters:
845: +  outits - number of iterations actually used (for SOR this always equals its)
846: .  reason - the reason the apply terminated
847: -  y - the solution (also contains the initial guess if guesszero is PETSC_FALSE)

849:    Notes:
850:    Most preconditioners do not support this function. Use the command
851:    PCApplyRichardsonExists() to determine if one does.

853:    Except for the multigrid PC this routine ignores the convergence tolerances
854:    and always runs for the requested number of iterations.

856:    Level: developer

858: .seealso: PCApplyRichardsonExists()
859: @*/
860: PetscErrorCode  PCApplyRichardson(PC pc,Vec b,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool guesszero,PetscInt *outits,PCRichardsonConvergedReason *reason)
861: {

869:   if (b == y) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_IDN,"b and y must be different vectors");
870:   PCSetUp(pc);
871:   if (!pc->ops->applyrichardson) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC does not have apply richardson");
872:   (*pc->ops->applyrichardson)(pc,b,y,w,rtol,abstol,dtol,its,guesszero,outits,reason);
873:   return(0);
874: }
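/*
   Illustrative sketch (not part of precon.c): guard the call with PCApplyRichardsonExists(),
   since most PC types do not provide this operation. Error checking omitted; b, y and w are
   assumed to be conforming Vecs.

     PetscBool                   exists;
     PetscInt                    outits;
     PCRichardsonConvergedReason reason;
     PCApplyRichardsonExists(pc,&exists);
     if (exists) {
       PCApplyRichardson(pc,b,y,w,1.e-5,1.e-50,1.e5,10,PETSC_TRUE,&outits,&reason);
     }
*/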

876: /*@
877:    PCSetFailedReason - Sets the reason a PCSetUp() failed or PC_NOERROR if it did not fail

879:    Logically Collective on PC

881:    Input Parameters:
882: +  pc - the preconditioner context
883: -  reason - the reason it failed

885:    Level: advanced

887: .seealso: PCCreate(), PCApply(), PCDestroy(), PCFailedReason
888: @*/
889: PetscErrorCode PCSetFailedReason(PC pc,PCFailedReason reason)
890: {
892:   pc->failedreason = reason;
893:   return(0);
894: }

896: /*@
897:    PCGetFailedReason - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail

899:    Logically Collective on PC

901:    Input Parameter:
902: .  pc - the preconditioner context

904:    Output Parameter:
905: .  reason - the reason it failed

907:    Level: advanced

909:    Notes: This is the maximum of the reason over all ranks in the PC communicator. It is only valid after
910:    a call to KSPCheckDot() or KSPCheckNorm() inside a KSPSolve(). It is not valid immediately after a PCSetUp()
911:    or PCApply(); in that case use PCGetFailedReasonRank()

913: .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReasonRank(), PCSetFailedReason()
914: @*/
915: PetscErrorCode PCGetFailedReason(PC pc,PCFailedReason *reason)
916: {
918:   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
919:   else *reason = pc->failedreason;
920:   return(0);
921: }

923: /*@
924:    PCGetFailedReasonRank - Gets the reason a PCSetUp() failed or PC_NOERROR if it did not fail on this MPI rank

926:   Not Collective on PC

928:    Input Parameter:
929: .  pc - the preconditioner context

931:    Output Parameter:
932: .  reason - the reason it failed

934:    Notes:
935:      Different ranks may have different reasons or no reason; see PCGetFailedReason()

937:    Level: advanced

939: .seealso: PCCreate(), PCApply(), PCDestroy(), PCGetFailedReason(), PCSetFailedReason()
940: @*/
941: PetscErrorCode PCGetFailedReasonRank(PC pc,PCFailedReason *reason)
942: {
944:   if (pc->setupcalled < 0) *reason = (PCFailedReason)pc->setupcalled;
945:   else *reason = pc->failedreason;
946:   return(0);
947: }

949: /*  Next line needed to deactivate KSP_Solve logging */
950: #include <petsc/private/kspimpl.h>

952: /*
953:       a setupcalled of 0 indicates the PC has never been set up,
954:                      1 indicates it has been set up previously,
955:                     -1 indicates a PCSetUp() was attempted and failed
956: */
957: /*@
958:    PCSetUp - Prepares for the use of a preconditioner.

960:    Collective on PC

962:    Input Parameter:
963: .  pc - the preconditioner context

965:    Level: developer

967: .seealso: PCCreate(), PCApply(), PCDestroy()
968: @*/
969: PetscErrorCode  PCSetUp(PC pc)
970: {
971:   PetscErrorCode   ierr;
972:   const char       *def;
973:   PetscObjectState matstate, matnonzerostate;

977:   if (!pc->mat) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be set first");

979:   if (pc->setupcalled && pc->reusepreconditioner) {
980:     PetscInfo(pc,"Leaving PC with identical preconditioner since reuse preconditioner is set\n");
981:     return(0);
982:   }

984:   PetscObjectStateGet((PetscObject)pc->pmat,&matstate);
985:   MatGetNonzeroState(pc->pmat,&matnonzerostate);
986:   if (!pc->setupcalled) {
987:     PetscInfo(pc,"Setting up PC for first time\n");
988:     pc->flag = DIFFERENT_NONZERO_PATTERN;
989:   } else if (matstate == pc->matstate) {
990:     PetscInfo(pc,"Leaving PC with identical preconditioner since operator is unchanged\n");
991:     return(0);
992:   } else {
993:     if (matnonzerostate > pc->matnonzerostate) {
994:       PetscInfo(pc,"Setting up PC with different nonzero pattern\n");
995:       pc->flag = DIFFERENT_NONZERO_PATTERN;
996:     } else {
997:       PetscInfo(pc,"Setting up PC with same nonzero pattern\n");
998:       pc->flag = SAME_NONZERO_PATTERN;
999:     }
1000:   }
1001:   pc->matstate        = matstate;
1002:   pc->matnonzerostate = matnonzerostate;

1004:   if (!((PetscObject)pc)->type_name) {
1005:     PCGetDefaultType_Private(pc,&def);
1006:     PCSetType(pc,def);
1007:   }

1009:   MatSetErrorIfFailure(pc->pmat,pc->erroriffailure);
1010:   MatSetErrorIfFailure(pc->mat,pc->erroriffailure);
1011:   PetscLogEventBegin(PC_SetUp,pc,0,0,0);
1012:   if (pc->ops->setup) {
1013:     /* do not log solves and applications of preconditioners while constructing preconditioners; perhaps they should be logged separately from the regular solves */
1014:     KSPInitializePackage();
1015:     PetscLogEventDeactivatePush(KSP_Solve);
1016:     PetscLogEventDeactivatePush(PC_Apply);
1017:     (*pc->ops->setup)(pc);
1018:     PetscLogEventDeactivatePop(KSP_Solve);
1019:     PetscLogEventDeactivatePop(PC_Apply);
1020:   }
1021:   PetscLogEventEnd(PC_SetUp,pc,0,0,0);
1022:   if (!pc->setupcalled) pc->setupcalled = 1;
1023:   return(0);
1024: }
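/*
   Illustrative sketch (not part of precon.c) of how the state tests above play out across
   repeated solves with the same matrix object A (error checking omitted):

     PCSetUp(pc);                 first time: builds the preconditioner, flag = DIFFERENT_NONZERO_PATTERN
     PCApply(pc,x,y);
     PCSetUp(pc);                 matrix unchanged: returns early, nothing is rebuilt

     MatSetValues(A,...);         change numerical values only
     MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
     PCSetUp(pc);                 rebuilt with flag = SAME_NONZERO_PATTERN

   Calling PCSetReusePreconditioner(pc,PETSC_TRUE) skips the rebuild even when A has changed.
*/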

1026: /*@
1027:    PCSetUpOnBlocks - Sets up the preconditioner for each block in
1028:    the block Jacobi, block Gauss-Seidel, and overlapping Schwarz
1029:    methods.

1031:    Collective on PC

1033:    Input Parameters:
1034: .  pc - the preconditioner context

1036:    Level: developer

1038: .seealso: PCCreate(), PCApply(), PCDestroy(), PCSetUp()
1039: @*/
1040: PetscErrorCode  PCSetUpOnBlocks(PC pc)
1041: {

1046:   if (!pc->ops->setuponblocks) return(0);
1047:   PetscLogEventBegin(PC_SetUpOnBlocks,pc,0,0,0);
1048:   (*pc->ops->setuponblocks)(pc);
1049:   PetscLogEventEnd(PC_SetUpOnBlocks,pc,0,0,0);
1050:   return(0);
1051: }

1053: /*@C
1054:    PCSetModifySubMatrices - Sets a user-defined routine for modifying the
1055:    submatrices that arise within certain subdomain-based preconditioners.
1056:    The basic submatrices are extracted from the preconditioner matrix as
1057:    usual; the user can then alter these (for example, to set different boundary
1058:    conditions for each submatrix) before they are used for the local solves.

1060:    Logically Collective on PC

1062:    Input Parameters:
1063: +  pc - the preconditioner context
1064: .  func - routine for modifying the submatrices
1065: -  ctx - optional user-defined context (may be null)

1067:    Calling sequence of func:
1068: $     func (PC pc,PetscInt nsub,IS *row,IS *col,Mat *submat,void *ctx);

1070: +  row - an array of index sets that contain the global row numbers
1071:          that comprise each local submatrix
1072: .  col - an array of index sets that contain the global column numbers
1073:          that comprise each local submatrix
1074: .  submat - array of local submatrices
1075: -  ctx - optional user-defined context for private data for the
1076:          user-defined func routine (may be null)

1078:    Notes:
1079:    PCSetModifySubMatrices() MUST be called before KSPSetUp() and
1080:    KSPSolve().

1082:    A routine set by PCSetModifySubMatrices() is currently called within
1083:    the block Jacobi (PCBJACOBI) and additive Schwarz (PCASM)
1084:    preconditioners.  All other preconditioners ignore this routine.

1086:    Level: advanced

1088: .seealso: PCModifySubMatrices()
1089: @*/
1090: PetscErrorCode  PCSetModifySubMatrices(PC pc,PetscErrorCode (*func)(PC,PetscInt,const IS[],const IS[],Mat[],void*),void *ctx)
1091: {
1094:   pc->modifysubmatrices  = func;
1095:   pc->modifysubmatricesP = ctx;
1096:   return(0);
1097: }
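/*
   Illustrative sketch (not part of precon.c): a hypothetical callback with the calling sequence
   documented above, here zeroing a (hypothetical) list of boundary rows in every local submatrix
   before the subdomain solves. Error checking omitted.

     PetscErrorCode MyModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
     {
       PetscInt i;
       for (i = 0; i < nsub; i++) {
         MatZeroRows(submat[i],0,NULL,1.0,NULL,NULL);   placeholder: supply the local boundary rows here
       }
       return 0;
     }

     PCSetModifySubMatrices(pc,MyModifySubMatrices,NULL);
*/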

1099: /*@C
1100:    PCModifySubMatrices - Calls an optional user-defined routine within
1101:    certain preconditioners if one has been set with PCSetModifySubMatrices().

1103:    Collective on PC

1105:    Input Parameters:
1106: +  pc - the preconditioner context
1107: .  nsub - the number of local submatrices
1108: .  row - an array of index sets that contain the global row numbers
1109:          that comprise each local submatrix
1110: .  col - an array of index sets that contain the global column numbers
1111:          that comprise each local submatrix
1112: .  submat - array of local submatrices
1113: -  ctx - optional user-defined context for private data for the
1114:          user-defined routine (may be null)

1116:    Output Parameter:
1117: .  submat - array of local submatrices (the entries of which may
1118:             have been modified)

1120:    Notes:
1121:    The user should NOT generally call this routine, as it will
1122:    automatically be called within certain preconditioners (currently
1123:    block Jacobi, additive Schwarz) if set.

1125:    The basic submatrices are extracted from the preconditioner matrix
1126:    as usual; the user can then alter these (for example, to set different
1127:    boundary conditions for each submatrix) before they are used for the
1128:    local solves.

1130:    Level: developer

1132: .seealso: PCSetModifySubMatrices()
1133: @*/
1134: PetscErrorCode  PCModifySubMatrices(PC pc,PetscInt nsub,const IS row[],const IS col[],Mat submat[],void *ctx)
1135: {

1140:   if (!pc->modifysubmatrices) return(0);
1141:   PetscLogEventBegin(PC_ModifySubMatrices,pc,0,0,0);
1142:   (*pc->modifysubmatrices)(pc,nsub,row,col,submat,ctx);
1143:   PetscLogEventEnd(PC_ModifySubMatrices,pc,0,0,0);
1144:   return(0);
1145: }

1147: /*@
1148:    PCSetOperators - Sets the matrix associated with the linear system and
1149:    a (possibly) different one associated with the preconditioner.

1151:    Logically Collective on PC

1153:    Input Parameters:
1154: +  pc - the preconditioner context
1155: .  Amat - the matrix that defines the linear system
1156: -  Pmat - the matrix to be used in constructing the preconditioner, usually the same as Amat.

1158:    Notes:
1159:     Passing a NULL for Amat or Pmat removes the matrix that is currently used.

1161:     If you wish to replace either Amat or Pmat but leave the other one untouched then
1162:     first call KSPGetOperators() to get the one you wish to keep, call PetscObjectReference()
1163:     on it and then pass it back in your call to KSPSetOperators().

1165:    More Notes about Repeated Solution of Linear Systems:
1166:    PETSc does NOT reset the matrix entries of either Amat or Pmat
1167:    to zero after a linear solve; the user is completely responsible for
1168:    matrix assembly.  See the routine MatZeroEntries() if desiring to
1169:    zero all elements of a matrix.

1171:    Level: intermediate

1173: .seealso: PCGetOperators(), MatZeroEntries()
1174:  @*/
1175: PetscErrorCode  PCSetOperators(PC pc,Mat Amat,Mat Pmat)
1176: {
1177:   PetscErrorCode   ierr;
1178:   PetscInt         m1,n1,m2,n2;

1186:   if (pc->setupcalled && pc->mat && pc->pmat && Amat && Pmat) {
1187:     MatGetLocalSize(Amat,&m1,&n1);
1188:     MatGetLocalSize(pc->mat,&m2,&n2);
1189:     if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Amat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1190:     MatGetLocalSize(Pmat,&m1,&n1);
1191:     MatGetLocalSize(pc->pmat,&m2,&n2);
1192:     if (m1 != m2 || n1 != n2) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot change local size of Pmat after use old sizes %D %D new sizes %D %D",m2,n2,m1,n1);
1193:   }

1195:   if (Pmat != pc->pmat) {
1196:     /* changing the operator that defines the preconditioner, so clear the current states to force a new preconditioner to be built */
1197:     pc->matnonzerostate = -1;
1198:     pc->matstate        = -1;
1199:   }

1201:   /* reference first in case the matrices are the same */
1202:   if (Amat) {PetscObjectReference((PetscObject)Amat);}
1203:   MatDestroy(&pc->mat);
1204:   if (Pmat) {PetscObjectReference((PetscObject)Pmat);}
1205:   MatDestroy(&pc->pmat);
1206:   pc->mat  = Amat;
1207:   pc->pmat = Pmat;
1208:   return(0);
1209: }
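/*
   Illustrative sketch (not part of precon.c) of the note above about replacing only one of the
   two matrices: keep the current Amat, swap in a new preconditioning matrix ("Pnew" is a
   placeholder name). Error checking omitted.

     Mat Amat;
     PCGetOperators(pc,&Amat,NULL);
     PetscObjectReference((PetscObject)Amat);     hold the matrix across the swap
     PCSetOperators(pc,Amat,Pnew);
     PetscObjectDereference((PetscObject)Amat);   give back the reference taken above
*/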

1211: /*@
1212:    PCSetReusePreconditioner - reuse the current preconditioner even if the operator in the preconditioner has changed.

1214:    Logically Collective on PC

1216:    Input Parameters:
1217: +  pc - the preconditioner context
1218: -  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

1220:     Level: intermediate

1222: .seealso: PCGetOperators(), MatZeroEntries(), PCGetReusePreconditioner(), KSPSetReusePreconditioner()
1223:  @*/
1224: PetscErrorCode  PCSetReusePreconditioner(PC pc,PetscBool flag)
1225: {
1229:   pc->reusepreconditioner = flag;
1230:   return(0);
1231: }

1233: /*@
1234:    PCGetReusePreconditioner - Determines if the PC reuses the current preconditioner even if the operator in the preconditioner has changed.

1236:    Not Collective

1238:    Input Parameter:
1239: .  pc - the preconditioner context

1241:    Output Parameter:
1242: .  flag - PETSC_TRUE do not compute a new preconditioner, PETSC_FALSE do compute a new preconditioner

1244:    Level: intermediate

1246: .seealso: PCGetOperators(), MatZeroEntries(), PCSetReusePreconditioner()
1247:  @*/
1248: PetscErrorCode  PCGetReusePreconditioner(PC pc,PetscBool *flag)
1249: {
1253:   *flag = pc->reusepreconditioner;
1254:   return(0);
1255: }

1257: /*@
1258:    PCGetOperators - Gets the matrix associated with the linear system and
1259:    possibly a different one associated with the preconditioner.

1261:    Not collective, though parallel Mats are returned if the PC is parallel

1263:    Input Parameter:
1264: .  pc - the preconditioner context

1266:    Output Parameters:
1267: +  Amat - the matrix defining the linear system
1268: -  Pmat - the matrix from which the preconditioner is constructed, usually the same as Amat.

1270:    Level: intermediate

1272:    Notes:
1273:     Does not increase the reference count of the matrices, so you should not destroy them

1275:    Alternative usage: If the operators have NOT been set with KSP/PCSetOperators() then the operators
1276:       are created in PC and returned to the user. In this case, if both operators
1277:       mat and pmat are requested, two DIFFERENT operators will be returned. If
1278:       only one is requested both operators in the PC will be the same (i.e. as
1279:       if one had called KSP/PCSetOperators() with the same argument for both Mats).
1280:       The user must set the sizes of the returned matrices and their type etc just
1281:       as if the user created them with MatCreate(). For example,

1283: $         KSP/PCGetOperators(ksp/pc,&Amat,NULL); is equivalent to
1284: $           set size, type, etc of Amat

1286: $         MatCreate(comm,&Amat);
1287: $         KSP/PCSetOperators(ksp/pc,Amat,Amat);
1288: $         PetscObjectDereference((PetscObject)Amat);
1289: $           set size, type, etc of Amat

1291:      and

1293: $         KSP/PCGetOperators(ksp/pc,&Amat,&Pmat); is equivalent to
1294: $           set size, type, etc of Amat and Pmat

1296: $         MatCreate(comm,&Amat);
1297: $         MatCreate(comm,&Pmat);
1298: $         KSP/PCSetOperators(ksp/pc,Amat,Pmat);
1299: $         PetscObjectDereference((PetscObject)Amat);
1300: $         PetscObjectDereference((PetscObject)Pmat);
1301: $           set size, type, etc of Amat and Pmat

1303:     The rationale for this support is that when creating a TS, SNES, or KSP the hierarchy
1304:     of underlying objects (i.e. SNES, KSP, PC, Mat) and their lifespans can be completely
1305:     managed by the topmost level object (i.e. the TS, SNES, or KSP). Another way to look
1306:     at this is when you create a SNES you do not NEED to create a KSP and attach it to
1307:     the SNES object (the SNES object manages it for you). Similarly when you create a KSP
1308:     you do not need to attach a PC to it (the KSP object manages the PC object for you).
1309:     Thus, why should YOU have to create the Mat and attach it to the SNES/KSP/PC, when
1310:     it can be created for you?

1312: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperatorsSet()
1313: @*/
1314: PetscErrorCode  PCGetOperators(PC pc,Mat *Amat,Mat *Pmat)
1315: {

1320:   if (Amat) {
1321:     if (!pc->mat) {
1322:       if (pc->pmat && !Pmat) {  /* Pmat has been set, but user did not request it, so use for Amat */
1323:         pc->mat = pc->pmat;
1324:         PetscObjectReference((PetscObject)pc->mat);
1325:       } else {                  /* both Amat and Pmat are empty */
1326:         MatCreate(PetscObjectComm((PetscObject)pc),&pc->mat);
1327:         if (!Pmat) { /* user did NOT request Pmat, so make same as Amat */
1328:           pc->pmat = pc->mat;
1329:           PetscObjectReference((PetscObject)pc->pmat);
1330:         }
1331:       }
1332:     }
1333:     *Amat = pc->mat;
1334:   }
1335:   if (Pmat) {
1336:     if (!pc->pmat) {
1337:       if (pc->mat && !Amat) {    /* Amat has been set but was not requested, so use for pmat */
1338:         pc->pmat = pc->mat;
1339:         PetscObjectReference((PetscObject)pc->pmat);
1340:       } else {
1341:         MatCreate(PetscObjectComm((PetscObject)pc),&pc->pmat);
1342:         if (!Amat) { /* user did NOT request Amat, so make same as Pmat */
1343:           pc->mat = pc->pmat;
1344:           PetscObjectReference((PetscObject)pc->mat);
1345:         }
1346:       }
1347:     }
1348:     *Pmat = pc->pmat;
1349:   }
1350:   return(0);
1351: }

1353: /*@C
1354:    PCGetOperatorsSet - Determines if the matrix associated with the linear system and
1355:    possibly a different one associated with the preconditioner have been set in the PC.

1357:    Not collective, though the results on all processes should be the same

1359:    Input Parameter:
1360: .  pc - the preconditioner context

1362:    Output Parameters:
1363: +  mat - the matrix associated with the linear system was set
1364: -  pmat - matrix associated with the preconditioner was set, usually the same

1366:    Level: intermediate

1368: .seealso: PCSetOperators(), KSPGetOperators(), KSPSetOperators(), PCGetOperators()
1369: @*/
1370: PetscErrorCode  PCGetOperatorsSet(PC pc,PetscBool  *mat,PetscBool  *pmat)
1371: {
1374:   if (mat) *mat = (pc->mat)  ? PETSC_TRUE : PETSC_FALSE;
1375:   if (pmat) *pmat = (pc->pmat) ? PETSC_TRUE : PETSC_FALSE;
1376:   return(0);
1377: }

1379: /*@
1380:    PCFactorGetMatrix - Gets the factored matrix from the
1381:    preconditioner context.  This routine is valid only for the LU,
1382:    incomplete LU, Cholesky, and incomplete Cholesky methods.

1384:    Not Collective on PC though Mat is parallel if PC is parallel

1386:    Input Parameters:
1387: .  pc - the preconditioner context

1389:    Output parameters:
1390: .  mat - the factored matrix

1392:    Level: advanced

1394:    Notes:
1395:     Does not increase the reference count for the matrix so DO NOT destroy it

1397: @*/
1398: PetscErrorCode  PCFactorGetMatrix(PC pc,Mat *mat)
1399: {

1405:   if (pc->ops->getfactoredmatrix) {
1406:     (*pc->ops->getfactoredmatrix)(pc,mat);
1407:   } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PC type does not support getting factor matrix");
1408:   return(0);
1409: }

1411: /*@C
1412:    PCSetOptionsPrefix - Sets the prefix used for searching for all
1413:    PC options in the database.

1415:    Logically Collective on PC

1417:    Input Parameters:
1418: +  pc - the preconditioner context
1419: -  prefix - the prefix string to prepend to all PC option requests

1421:    Notes:
1422:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1423:    The first character of all runtime options is AUTOMATICALLY the
1424:    hyphen.

1426:    Level: advanced

1428: .seealso: PCAppendOptionsPrefix(), PCGetOptionsPrefix()
1429: @*/
1430: PetscErrorCode  PCSetOptionsPrefix(PC pc,const char prefix[])
1431: {

1436:   PetscObjectSetOptionsPrefix((PetscObject)pc,prefix);
1437:   return(0);
1438: }
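/*
   Illustrative sketch (not part of precon.c): with a prefix set, all PC options must be given
   with that prefix on the command line (error checking omitted).

     PCSetOptionsPrefix(pc,"sub_");
     PCSetFromOptions(pc);          now controlled by e.g. -sub_pc_type ilu -sub_pc_factor_levels 2

   Note that the prefix itself carries no leading hyphen, as stated above.
*/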

1440: /*@C
1441:    PCAppendOptionsPrefix - Appends to the prefix used for searching for all
1442:    PC options in the database.

1444:    Logically Collective on PC

1446:    Input Parameters:
1447: +  pc - the preconditioner context
1448: -  prefix - the prefix string to prepend to all PC option requests

1450:    Notes:
1451:    A hyphen (-) must NOT be given at the beginning of the prefix name.
1452:    The first character of all runtime options is AUTOMATICALLY the
1453:    hyphen.

1455:    Level: advanced

1457: .seealso: PCSetOptionsPrefix(), PCGetOptionsPrefix()
1458: @*/
1459: PetscErrorCode  PCAppendOptionsPrefix(PC pc,const char prefix[])
1460: {

1465:   PetscObjectAppendOptionsPrefix((PetscObject)pc,prefix);
1466:   return(0);
1467: }

1469: /*@C
1470:    PCGetOptionsPrefix - Gets the prefix used for searching for all
1471:    PC options in the database.

1473:    Not Collective

1475:    Input Parameters:
1476: .  pc - the preconditioner context

1478:    Output Parameters:
1479: .  prefix - pointer to the prefix string used, is returned

1481:    Notes:
1482:     On the Fortran side, the user should pass in a string 'prefix' of
1483:    sufficient length to hold the prefix.

1485:    Level: advanced

1487: .seealso: PCSetOptionsPrefix(), PCAppendOptionsPrefix()
1488: @*/
1489: PetscErrorCode  PCGetOptionsPrefix(PC pc,const char *prefix[])
1490: {

1496:   PetscObjectGetOptionsPrefix((PetscObject)pc,prefix);
1497:   return(0);
1498: }

1500: /*
1501:    Indicates that the right-hand side will be changed by KSPSolve(); this occurs for a few
1502:   preconditioners, including BDDC and Eisenstat, that transform the equations before applying
1503:   the Krylov method
1504: */
1505: PETSC_INTERN PetscErrorCode  PCPreSolveChangeRHS(PC pc,PetscBool *change)
1506: {

1512:   *change = PETSC_FALSE;
1513:   PetscTryMethod(pc,"PCPreSolveChangeRHS_C",(PC,PetscBool*),(pc,change));
1514:   return(0);
1515: }

1517: /*@
1518:    PCPreSolve - Optional pre-solve phase, intended for any
1519:    preconditioner-specific actions that must be performed before
1520:    the iterative solve itself.

1522:    Collective on PC

1524:    Input Parameters:
1525: +  pc - the preconditioner context
1526: -  ksp - the Krylov subspace context

1528:    Level: developer

1530:    Sample of Usage:
1531: .vb
1532:     PCPreSolve(pc,ksp);
1533:     KSPSolve(ksp,b,x);
1534:     PCPostSolve(pc,ksp);
1535: .ve

1537:    Notes:
1538:    The pre-solve phase is distinct from the PCSetUp() phase.

1540:    KSPSolve() calls this directly, so it is rarely called by the user.

1542: .seealso: PCPostSolve()
1543: @*/
1544: PetscErrorCode PCPreSolve(PC pc,KSP ksp)
1545: {
1547:   Vec            x,rhs;

1552:   pc->presolvedone++;
1553:   if (pc->presolvedone > 2) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot embed PCPreSolve() more than twice");
1554:   KSPGetSolution(ksp,&x);
1555:   KSPGetRhs(ksp,&rhs);

1557:   if (pc->ops->presolve) {
1558:     (*pc->ops->presolve)(pc,ksp,rhs,x);
1559:   } else if (pc->presolve) {
1560:     (pc->presolve)(pc,ksp);
1561:   }
1562:   return(0);
1563: }

1565: /*@C
1566:    PCSetPreSolve - Sets function PCPreSolve() which is intended for any
1567:    preconditioner-specific actions that must be performed before
1568:    the iterative solve itself.

1570:    Logically Collective on pc

1572:    Input Parameters:
1573: +   pc - the preconditioner object
1574: -   presolve - the function to call before the solve

1576:    Calling sequence of presolve:
1577: $  func(PC pc,KSP ksp)

1579: +  pc - the PC context
1580: -  ksp - the KSP context

1582:    Level: developer

1584: .seealso: PC, PCSetUp(), PCPreSolve()
1585: @*/
1586: PetscErrorCode PCSetPreSolve(PC pc,PetscErrorCode (*presolve)(PC,KSP))
1587: {
1590:   pc->presolve = presolve;
1591:   return(0);
1592: }
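/*
   Illustrative sketch (not part of precon.c): a hypothetical presolve callback with the calling
   sequence documented above; PCPreSolve() will invoke it from within KSPSolve(). Error checking
   omitted.

     PetscErrorCode MyPreSolve(PC pc,KSP ksp)
     {
       Vec b;
       KSPGetRhs(ksp,&b);
       ... inspect or transform data tied to the forthcoming solve here ...
       return 0;
     }

     PCSetPreSolve(pc,MyPreSolve);
*/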

1594: /*@
1595:    PCPostSolve - Optional post-solve phase, intended for any
1596:    preconditioner-specific actions that must be performed after
1597:    the iterative solve itself.

1599:    Collective on PC

1601:    Input Parameters:
1602: +  pc - the preconditioner context
1603: -  ksp - the Krylov subspace context

1605:    Sample of Usage:
1606: .vb
1607:     PCPreSolve(pc,ksp);
1608:     KSPSolve(ksp,b,x);
1609:     PCPostSolve(pc,ksp);
1610: .ve

1612:    Note:
1613:    KSPSolve() calls this routine directly, so it is rarely called by the user.

1615:    Level: developer

1617: .seealso: PCPreSolve(), KSPSolve()
1618: @*/
1619: PetscErrorCode  PCPostSolve(PC pc,KSP ksp)
1620: {
1622:   Vec            x,rhs;

1627:   pc->presolvedone--;
1628:   KSPGetSolution(ksp,&x);
1629:   KSPGetRhs(ksp,&rhs);
1630:   if (pc->ops->postsolve) {
1631:      (*pc->ops->postsolve)(pc,ksp,rhs,x);
1632:   }
1633:   return(0);
1634: }

1636: /*@C
1637:   PCLoad - Loads a PC that has been stored in binary  with PCView().

1639:   Collective on PetscViewer

1641:   Input Parameters:
1642: + newdm - the newly loaded PC; this needs to have been created with PCCreate() or
1643:            some related function before a call to PCLoad().
1644: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

1646:    Level: intermediate

1648:   Notes:
1649:    The type is determined by the data in the file; any type set into the PC before this call is ignored.

1651:   Notes for advanced users:
1652:   Most users should not need to know the details of the binary storage
1653:   format, since PCLoad() and PCView() completely hide these details.
1654:   But for anyone who's interested, the standard binary matrix storage
1655:   format is
1656: .vb
1657:      has not yet been determined
1658: .ve

1660: .seealso: PetscViewerBinaryOpen(), PCView(), MatLoad(), VecLoad()
1661: @*/
1662: PetscErrorCode  PCLoad(PC newdm, PetscViewer viewer)
1663: {
1665:   PetscBool      isbinary;
1666:   PetscInt       classid;
1667:   char           type[256];

1672:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1673:   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

1675:   PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);
1676:   if (classid != PC_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not PC next in file");
1677:   PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);
1678:   PCSetType(newdm, type);
1679:   if (newdm->ops->load) {
1680:     (*newdm->ops->load)(newdm,viewer);
1681:   }
1682:   return(0);
1683: }

1685: #include <petscdraw.h>
1686: #if defined(PETSC_HAVE_SAWS)
1687: #include <petscviewersaws.h>
1688: #endif

1690: /*@C
1691:    PCViewFromOptions - Views the PC based on options set in the options database

1693:    Collective on PC

1695:    Input Parameters:
1696: +  A - the PC context
1697: .  obj - Optional object
1698: -  name - command line option

1700:    Level: intermediate
1701: .seealso:  PC, PCView, PetscObjectViewFromOptions(), PCCreate()
1702: @*/
1703: PetscErrorCode  PCViewFromOptions(PC A,PetscObject obj,const char name[])
1704: {

1709:   PetscObjectViewFromOptions((PetscObject)A,obj,name);
1710:   return(0);
1711: }

1713: /*@C
1714:    PCView - Prints the PC data structure.

1716:    Collective on PC

1718:    Input Parameters:
1719: +  pc - the PC context
1720: -  viewer - optional visualization context

1722:    Note:
1723:    The available visualization contexts include
1724: +     PETSC_VIEWER_STDOUT_SELF - standard output (default)
1725: -     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
1726:          output where only the first processor opens
1727:          the file.  All other processors send their
1728:          data to the first processor to print.

1730:    The user can open an alternative visualization context with
1731:    PetscViewerASCIIOpen() (output to a specified file).

1733:    Level: developer

1735: .seealso: KSPView(), PetscViewerASCIIOpen()
1736: @*/
1737: PetscErrorCode  PCView(PC pc,PetscViewer viewer)
1738: {
1739:   PCType         cstr;
1741:   PetscBool      iascii,isstring,isbinary,isdraw;
1742: #if defined(PETSC_HAVE_SAWS)
1743:   PetscBool      issaws;
1744: #endif

1748:   if (!viewer) {
1749:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)pc),&viewer);
1750:   }

1754:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
1755:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);
1756:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
1757:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
1758: #if defined(PETSC_HAVE_SAWS)
1759:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws);
1760: #endif

1762:   if (iascii) {
1763:     PetscObjectPrintClassNamePrefixType((PetscObject)pc,viewer);
1764:     if (!pc->setupcalled) {
1765:       PetscViewerASCIIPrintf(viewer,"  PC has not been set up so information may be incomplete\n");
1766:     }
1767:     if (pc->ops->view) {
1768:       PetscViewerASCIIPushTab(viewer);
1769:       (*pc->ops->view)(pc,viewer);
1770:       PetscViewerASCIIPopTab(viewer);
1771:     }
1772:     if (pc->mat) {
1773:       PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO);
1774:       if (pc->pmat == pc->mat) {
1775:         PetscViewerASCIIPrintf(viewer,"  linear system matrix = precond matrix:\n");
1776:         PetscViewerASCIIPushTab(viewer);
1777:         MatView(pc->mat,viewer);
1778:         PetscViewerASCIIPopTab(viewer);
1779:       } else {
1780:         if (pc->pmat) {
1781:           PetscViewerASCIIPrintf(viewer,"  linear system matrix followed by preconditioner matrix:\n");
1782:         } else {
1783:           PetscViewerASCIIPrintf(viewer,"  linear system matrix:\n");
1784:         }
1785:         PetscViewerASCIIPushTab(viewer);
1786:         MatView(pc->mat,viewer);
1787:         if (pc->pmat) {MatView(pc->pmat,viewer);}
1788:         PetscViewerASCIIPopTab(viewer);
1789:       }
1790:       PetscViewerPopFormat(viewer);
1791:     }
1792:   } else if (isstring) {
1793:     PCGetType(pc,&cstr);
1794:     PetscViewerStringSPrintf(viewer," PCType: %-7.7s",cstr);
1795:     if (pc->ops->view) {(*pc->ops->view)(pc,viewer);}
1796:     if (pc->mat) {MatView(pc->mat,viewer);}
1797:     if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1798:   } else if (isbinary) {
1799:     PetscInt    classid = PC_FILE_CLASSID;
1800:     MPI_Comm    comm;
1801:     PetscMPIInt rank;
1802:     char        type[256];

1804:     PetscObjectGetComm((PetscObject)pc,&comm);
1805:     MPI_Comm_rank(comm,&rank);
1806:     if (rank == 0) {
1807:       PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT);
1808:       PetscStrncpy(type,((PetscObject)pc)->type_name,256);
1809:       PetscViewerBinaryWrite(viewer,type,256,PETSC_CHAR);
1810:     }
1811:     if (pc->ops->view) {
1812:       (*pc->ops->view)(pc,viewer);
1813:     }
1814:   } else if (isdraw) {
1815:     PetscDraw draw;
1816:     char      str[25];
1817:     PetscReal x,y,bottom,h;
1818:     PetscInt  n;

1820:     PetscViewerDrawGetDraw(viewer,0,&draw);
1821:     PetscDrawGetCurrentPoint(draw,&x,&y);
1822:     if (pc->mat) {
1823:       MatGetSize(pc->mat,&n,NULL);
1824:       PetscSNPrintf(str,25,"PC: %s (%D)",((PetscObject)pc)->type_name,n);
1825:     } else {
1826:       PetscSNPrintf(str,25,"PC: %s",((PetscObject)pc)->type_name);
1827:     }
1828:     PetscDrawStringBoxed(draw,x,y,PETSC_DRAW_RED,PETSC_DRAW_BLACK,str,NULL,&h);
1829:     bottom = y - h;
1830:     PetscDrawPushCurrentPoint(draw,x,bottom);
1831:     if (pc->ops->view) {
1832:       (*pc->ops->view)(pc,viewer);
1833:     }
1834:     PetscDrawPopCurrentPoint(draw);
1835: #if defined(PETSC_HAVE_SAWS)
1836:   } else if (issaws) {
1837:     PetscMPIInt rank;

1839:     PetscObjectName((PetscObject)pc);
1840:     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
1841:     if (!((PetscObject)pc)->amsmem && rank == 0) {
1842:       PetscObjectViewSAWs((PetscObject)pc,viewer);
1843:     }
1844:     if (pc->mat) {MatView(pc->mat,viewer);}
1845:     if (pc->pmat && pc->pmat != pc->mat) {MatView(pc->pmat,viewer);}
1846: #endif
1847:   }
1848:   return(0);
1849: }
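/*
   Illustrative usage sketch: printing a PC with an ASCII viewer.  Passing NULL
   as the viewer falls back to standard output on the PC's communicator, as
   handled at the top of PCView() above.

     PCView(pc,PETSC_VIEWER_STDOUT_WORLD);
*/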

1851: /*@C
1852:   PCRegister - Adds a method to the preconditioner package.

1854:    Not collective

1856:    Input Parameters:
1857: +  sname - name of a new user-defined solver
1858: -  function - routine to create the method context

1860:    Notes:
1861:    PCRegister() may be called multiple times to add several user-defined preconditioners.

1863:    Sample usage:
1864: .vb
1865:    PCRegister("my_solver", MySolverCreate);
1866: .ve

1868:    Then, your solver can be chosen with the procedural interface via
1869: $     PCSetType(pc,"my_solver")
1870:    or at runtime via the option
1871: $     -pc_type my_solver

1873:    Level: advanced

1875: .seealso: PCRegisterAll()
1876: @*/
1877: PetscErrorCode  PCRegister(const char sname[],PetscErrorCode (*function)(PC))
1878: {

1882:   PCInitializePackage();
1883:   PetscFunctionListAdd(&PCList,sname,function);
1884:   return(0);
1885: }
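/*
   Illustrative sketch of registering a user-defined preconditioner; the names
   "mypc", MyPCApply and MyPCCreate are hypothetical.  Setting pc->ops->apply
   directly requires including <petsc/private/pcimpl.h>, as this file does.

     static PetscErrorCode MyPCApply(PC pc,Vec x,Vec y)
     {
       VecCopy(x,y);                  // identity preconditioner, for demonstration only
       return 0;
     }

     static PetscErrorCode MyPCCreate(PC pc)
     {
       pc->ops->apply = MyPCApply;    // install the apply routine
       return 0;
     }

     ...
     PCRegister("mypc",MyPCCreate);
     PCSetType(pc,"mypc");            // or -pc_type mypc on the command line
*/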

1887: static PetscErrorCode MatMult_PC(Mat A,Vec X,Vec Y)
1888: {
1889:   PC             pc;

1893:   MatShellGetContext(A,&pc);
1894:   PCApply(pc,X,Y);
1895:   return(0);
1896: }

1898: /*@
1899:     PCComputeOperator - Computes the explicit preconditioned operator.

1901:     Collective on PC

1903:     Input Parameters:
1904: +   pc - the preconditioner object
1905: -   mattype - the matrix type to be used for the operator

1907:     Output Parameter:
1908: .   mat - the explicit preconditioned operator

1910:     Notes:
1911:     This computation is done by applying the operators to columns of the identity matrix.
1912:     This routine is costly in general, and is recommended for use only with relatively small systems.
1913:     Currently, this routine uses a dense matrix format when mattype == NULL.

1915:     Level: advanced

1917: .seealso: KSPComputeOperator(), MatType

1919: @*/
1920: PetscErrorCode  PCComputeOperator(PC pc,MatType mattype,Mat *mat)
1921: {
1923:   PetscInt       N,M,m,n;
1924:   Mat            A,Apc;

1929:   PCGetOperators(pc,&A,NULL);
1930:   MatGetLocalSize(A,&m,&n);
1931:   MatGetSize(A,&M,&N);
1932:   MatCreateShell(PetscObjectComm((PetscObject)pc),m,n,M,N,pc,&Apc);
1933:   MatShellSetOperation(Apc,MATOP_MULT,(void (*)(void))MatMult_PC);
1934:   MatComputeOperator(Apc,mattype,mat);
1935:   MatDestroy(&Apc);
1936:   return(0);
1937: }
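/*
   Illustrative usage sketch: forming the explicit (dense) representation of the
   preconditioned operator for a small problem and printing it.

     Mat expl;

     PCComputeOperator(pc,MATDENSE,&expl);
     MatView(expl,PETSC_VIEWER_STDOUT_WORLD);
     MatDestroy(&expl);
*/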

1939: /*@
1940:    PCSetCoordinates - sets the coordinates of all the nodes on the local process

1942:    Collective on PC

1944:    Input Parameters:
1945: +  pc - the solver context
1946: .  dim - the dimension of the coordinates (1, 2, or 3)
1947: .  nloc - the number of local nodes (the blocked size of the coordinates array)
1948: -  coords - the coordinates array

1950:    Level: intermediate

1952:    Notes:
1953:    coords is an array of the dim coordinates for the nodes on
1954:    the local processor, of size dim*nloc.
1955:    If there are 108 equations on a processor
1956:    for a displacement finite element discretization of elasticity (so
1957:    that there are nloc = 36 = 108/3 nodes) then the array must have 108
1958:    double precision values (i.e., 3 * 36).  These x, y, z coordinates
1959:    should be ordered for nodes 0 to N-1 like so: [ 0.x, 0.y, 0.z, 1.x,
1960:    ... , N-1.z ].

1962: .seealso: MatSetNearNullSpace()
1963: @*/
1964: PetscErrorCode PCSetCoordinates(PC pc, PetscInt dim, PetscInt nloc, PetscReal coords[])
1965: {

1971:   PetscTryMethod(pc,"PCSetCoordinates_C",(PC,PetscInt,PetscInt,PetscReal*),(pc,dim,nloc,coords));
1972:   return(0);
1973: }
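/*
   Illustrative usage sketch: supplying interlaced x,y,z coordinates for nloc
   local nodes of a 3d problem, following the ordering described in the Notes.
   The arrays x, y, z of nodal positions are hypothetical; freeing coords
   afterwards assumes the implementation copies the values, as PCGAMG does.

     PetscReal *coords;
     PetscInt  i;

     PetscMalloc1(3*nloc,&coords);
     for (i=0; i<nloc; i++) {
       coords[3*i+0] = x[i];
       coords[3*i+1] = y[i];
       coords[3*i+2] = z[i];
     }
     PCSetCoordinates(pc,3,nloc,coords);
     PetscFree(coords);
*/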

1975: /*@
1976:    PCGetInterpolations - Gets interpolation matrices for all levels (except level 0)

1978:    Logically Collective on PC

1980:    Input Parameter:
1981: .  pc - the preconditioner context

1983:    Output Parameters:
1984: +  num_levels - the number of levels
1985: -  interpolations - the interpolation matrices (an array of length num_levels-1)

1987:    Level: advanced

1989: .keywords: MG, GAMG, BoomerAMG, multigrid, interpolation, level

1991: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetInterpolation(), PCGetCoarseOperators()
1992: @*/
1993: PetscErrorCode PCGetInterpolations(PC pc,PetscInt *num_levels,Mat *interpolations[])
1994: {

2001:   PetscUseMethod(pc,"PCGetInterpolations_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,interpolations));
2002:   return(0);
2003: }
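/*
   Illustrative usage sketch: retrieving the interpolation matrices built by a
   multigrid-type PC (e.g. PCMG or PCGAMG) after PCSetUp().  This assumes, as in
   the multigrid implementations, that the caller destroys the returned matrices
   and frees the array.

     PetscInt nlevels,i;
     Mat      *interp;

     PCGetInterpolations(pc,&nlevels,&interp);
     for (i=0; i<nlevels-1; i++) {
       MatDestroy(&interp[i]);
     }
     PetscFree(interp);
*/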

2005: /*@
2006:    PCGetCoarseOperators - Gets coarse operator matrices for all levels (except the finest level)

2008:    Logically Collective on PC

2010:    Input Parameter:
2011: .  pc - the preconditioner context

2013:    Output Parameters:
2014: +  num_levels - the number of levels
2015: -  coarseOperators - the coarse operator matrices (an array of length num_levels-1)

2017:    Level: advanced

2019: .keywords: MG, GAMG, BoomerAMG, get, multigrid, interpolation, level

2021: .seealso: PCMGGetRestriction(), PCMGSetInterpolation(), PCMGGetRScale(), PCMGGetInterpolation(), PCGetInterpolations()
2022: @*/
2023: PetscErrorCode PCGetCoarseOperators(PC pc,PetscInt *num_levels,Mat *coarseOperators[])
2024: {

2031:   PetscUseMethod(pc,"PCGetCoarseOperators_C",(PC,PetscInt*,Mat*[]),(pc,num_levels,coarseOperators));
2032:   return(0);
2033: }
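/*
   Illustrative usage sketch: retrieving the coarse-level operators, mirroring
   the PCGetInterpolations() example above; the returned array again holds
   num_levels-1 matrices.

     PetscInt nlevels,i;
     Mat      *coarse;

     PCGetCoarseOperators(pc,&nlevels,&coarse);
     for (i=0; i<nlevels-1; i++) {
       MatDestroy(&coarse[i]);
     }
     PetscFree(coarse);
*/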