Actual source code: cp.c
petsc-3.3-p7 2013-05-11
#include <petsc-private/pcimpl.h>   /*I "petscpc.h" I*/
#include <../src/mat/impls/aij/seq/aij.h>

/*
   Private context (data structure) for the CP preconditioner.
*/
typedef struct {
  PetscInt    n,m;
  Vec         work;
  PetscScalar *d;     /* reciprocal of the sum of squares of each column */
  PetscScalar *a;     /* nonzeros stored by column */
  PetscInt    *i,*j;  /* start offset of each column; row indices of nonzeros by column */
} PC_CP;

static PetscErrorCode PCSetUp_CP(PC pc)
{
  PC_CP      *cp = (PC_CP*)pc->data;
  PetscInt   i,j,*colcnt;
  PetscBool  flg;
  Mat_SeqAIJ *aij = (Mat_SeqAIJ*)pc->pmat->data;

  PetscObjectTypeCompare((PetscObject)pc->pmat,MATSEQAIJ,&flg);
  if (!flg) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Currently only handles SeqAIJ matrices");

  MatGetLocalSize(pc->pmat,&cp->m,&cp->n);
  if (cp->m != cp->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently only for square matrices");

  if (!cp->work) {MatGetVecs(pc->pmat,&cp->work,PETSC_NULL);}
  if (!cp->d) {PetscMalloc(cp->n*sizeof(PetscScalar),&cp->d);}
  if (cp->a && pc->flag != SAME_NONZERO_PATTERN) {
    PetscFree3(cp->a,cp->i,cp->j);
    cp->a = 0;
  }

  /* convert to column format */
  if (!cp->a) {
    PetscMalloc3(aij->nz,PetscScalar,&cp->a,cp->n+1,PetscInt,&cp->i,aij->nz,PetscInt,&cp->j);
  }
  PetscMalloc(cp->n*sizeof(PetscInt),&colcnt);
  PetscMemzero(colcnt,cp->n*sizeof(PetscInt));
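
  /*
     The column copy is built in three passes: count the nonzeros in each
     column (colcnt[]), prefix-sum the counts into the column start offsets
     cp->i[], then walk the rows of the AIJ matrix and scatter each entry's
     row index and value into cp->j[]/cp->a[], reusing colcnt[] as the
     per-column insertion cursor.
  */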
  for (i=0; i<aij->nz; i++) {
    colcnt[aij->j[i]]++;
  }
  cp->i[0] = 0;
  for (i=0; i<cp->n; i++) {
    cp->i[i+1] = cp->i[i] + colcnt[i];
  }
  PetscMemzero(colcnt,cp->n*sizeof(PetscInt));
  for (i=0; i<cp->m; i++) {  /* over rows */
    for (j=aij->i[i]; j<aij->i[i+1]; j++) {  /* over columns in row */
      cp->j[cp->i[aij->j[j]]+colcnt[aij->j[j]]]   = i;
      cp->a[cp->i[aij->j[j]]+colcnt[aij->j[j]]++] = aij->a[j];
    }
  }
  PetscFree(colcnt);

  /* compute d[i] = 1/(sum of squares of column i), used as 1/(A_i^T A_i) in PCApply_CP() */
  for (i=0; i<cp->n; i++) {  /* over columns */
    cp->d[i] = 0.;
    for (j=cp->i[i]; j<cp->i[i+1]; j++) {  /* over rows in column */
      cp->d[i] += cp->a[j]*cp->a[j];
    }
    cp->d[i] = 1.0/cp->d[i];
  }
  return(0);
}
/* -------------------------------------------------------------------------- */

static PetscErrorCode PCApply_CP(PC pc,Vec bb,Vec xx)
{
  PC_CP       *cp = (PC_CP*)pc->data;
  PetscScalar *b,*x,xt;
  PetscInt    i,j;

  VecCopy(bb,cp->work);
  VecGetArray(cp->work,&b);
  VecGetArray(xx,&x);
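
  /*
     Forward sweep over the columns: for each column i the correction
     dx_i = (A_i^T r)/(A_i^T A_i) = d[i]*(A_i^T r) minimizes || r - dx_i*A_i ||_2.
     Since the implicit initial guess is zero, x_i is set to dx_i and the
     running residual r (held in b) is updated.
  */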
  for (i=0; i<cp->n; i++) {  /* over columns */
    xt = 0.;
    for (j=cp->i[i]; j<cp->i[i+1]; j++) {  /* over rows in column */
      xt += cp->a[j]*b[cp->j[j]];
    }
    xt  *= cp->d[i];
    x[i] = xt;
    for (j=cp->i[i]; j<cp->i[i+1]; j++) {  /* over rows in column, updating b */
      b[cp->j[j]] -= xt*cp->a[j];
    }
  }
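
  /*
     Backward sweep in reverse column order, giving a symmetric, SSOR-like
     application. The residual in b already reflects the forward corrections,
     so each new correction is accumulated into x_i rather than overwriting it.
  */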
  for (i=cp->n-1; i>-1; i--) {  /* over columns */
    xt = 0.;
    for (j=cp->i[i]; j<cp->i[i+1]; j++) {  /* over rows in column */
      xt += cp->a[j]*b[cp->j[j]];
    }
    xt   *= cp->d[i];
    x[i] += xt;  /* accumulate the correction: b already carries the forward-sweep updates */
    for (j=cp->i[i]; j<cp->i[i+1]; j++) {  /* over rows in column, updating b */
      b[cp->j[j]] -= xt*cp->a[j];
    }
  }

  VecRestoreArray(cp->work,&b);
  VecRestoreArray(xx,&x);
  return(0);
}
/* -------------------------------------------------------------------------- */

static PetscErrorCode PCReset_CP(PC pc)
{
  PC_CP *cp = (PC_CP*)pc->data;

  PetscFree(cp->d);
  VecDestroy(&cp->work);
  PetscFree3(cp->a,cp->i,cp->j);
  return(0);
}

static PetscErrorCode PCDestroy_CP(PC pc)
{
  PC_CP *cp = (PC_CP*)pc->data;

  PCReset_CP(pc);
  PetscFree(cp->d);
  PetscFree3(cp->a,cp->i,cp->j);
  PetscFree(pc->data);
  return(0);
}

static PetscErrorCode PCSetFromOptions_CP(PC pc)
{
  return(0);
}

/*MC
     PCCP - a "column-projection" preconditioner

     This is a terrible preconditioner and is not recommended, ever!

     Loops over the entries of x computing dx_i to
$
$        min   || b - A(x + dx_i e_i) ||_2
$        dx_i
$
$    That is, it changes a single entry of x to minimize the new residual.
$    Let A_i represent the ith column of A; the minimization can then be written as
$
$        min   || r - (dx_i) A e_i ||_2
$        dx_i
$    or  min   || r - (dx_i) A_i ||_2
$        dx_i
$
$    Taking the derivative with respect to dx_i gives
$        dx_i = (A_i^T A_i)^(-1) A_i^T r
$
$    This algorithm can be thought of as Gauss-Seidel on the normal equations.

    Notes: This procedure can also be done with block columns or any groups of columns,
           but this is not coded.

           These "projections" can be done simultaneously for all columns (similar to Jacobi)
           or sequentially (similar to Gauss-Seidel/SOR). Only the SOR-type variant is coded.

           This is related to, but not the same as, "row projection" methods.

           This is currently coded only for SeqAIJ matrices in sequential (SOR) form.

   Level: intermediate

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCJACOBI, PCSOR

M*/
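
/*
   A minimal usage sketch (not part of the original file): assuming a SeqAIJ matrix A
   and vectors b, x have already been assembled, the preconditioner can be selected
   either programmatically or with the run-time option -pc_type cp:

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_SELF,&ksp);
     KSPSetOperators(ksp,A,A,SAME_NONZERO_PATTERN);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCCP);
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
     KSPDestroy(&ksp);
*/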

EXTERN_C_BEGIN
PetscErrorCode PCCreate_CP(PC pc)
{
  PC_CP *cp;

  PetscNewLog(pc,PC_CP,&cp);
  pc->data = (void*)cp;

  pc->ops->apply           = PCApply_CP;
  pc->ops->applytranspose  = PCApply_CP;
  pc->ops->setup           = PCSetUp_CP;
  pc->ops->reset           = PCReset_CP;
  pc->ops->destroy         = PCDestroy_CP;
  pc->ops->setfromoptions  = PCSetFromOptions_CP;
  pc->ops->view            = 0;
  pc->ops->applyrichardson = 0;
  return(0);
}
EXTERN_C_END