/* cp.c - implementation of the CP (column-projection) preconditioner */
2: #include <petsc/private/pcimpl.h>
3: #include <../src/mat/impls/aij/seq/aij.h>
/*
   Private context (data structure) for the CP preconditioner.

   PCSetUp_CP fills this with a column-oriented (CSC) copy of the SeqAIJ
   preconditioning matrix plus per-column scaling factors; PCApply_CP then
   sweeps over columns using these arrays.
*/
typedef struct {
  PetscInt    n,m;   /* local number of columns and rows of the (square) matrix */
  Vec         work;  /* scratch vector holding the running residual in PCApply_CP */
  PetscScalar *d;    /* reciprocal of the sum of squares of each column (set in PCSetUp_CP) */
  PetscScalar *a;    /* non-zeros by column */
  PetscInt    *i,*j; /* offsets of nonzeros by column, non-zero indices by column */
} PC_CP;
16: static PetscErrorCode PCSetUp_CP(PC pc)
17: {
18: PC_CP *cp = (PC_CP*)pc->data;
19: PetscInt i,j,*colcnt;
21: PetscBool flg;
22: Mat_SeqAIJ *aij = (Mat_SeqAIJ*)pc->pmat->data;
25: PetscObjectTypeCompare((PetscObject)pc->pmat,MATSEQAIJ,&flg);
26: if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Currently only handles SeqAIJ matrices");
28: MatGetLocalSize(pc->pmat,&cp->m,&cp->n);
29: if (cp->m != cp->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently only for square matrices");
31: if (!cp->work) {MatCreateVecs(pc->pmat,&cp->work,NULL);}
32: if (!cp->d) {PetscMalloc1(cp->n,&cp->d);}
33: if (cp->a && pc->flag != SAME_NONZERO_PATTERN) {
34: PetscFree3(cp->a,cp->i,cp->j);
35: cp->a = NULL;
36: }
38: /* convert to column format */
39: if (!cp->a) {
40: PetscMalloc3(aij->nz,&cp->a,cp->n+1,&cp->i,aij->nz,&cp->j);
41: }
42: PetscCalloc1(cp->n,&colcnt);
44: for (i=0; i<aij->nz; i++) colcnt[aij->j[i]]++;
45: cp->i[0] = 0;
46: for (i=0; i<cp->n; i++) cp->i[i+1] = cp->i[i] + colcnt[i];
47: PetscArrayzero(colcnt,cp->n);
48: for (i=0; i<cp->m; i++) { /* over rows */
49: for (j=aij->i[i]; j<aij->i[i+1]; j++) { /* over columns in row */
50: cp->j[cp->i[aij->j[j]]+colcnt[aij->j[j]]] = i;
51: cp->a[cp->i[aij->j[j]]+colcnt[aij->j[j]]++] = aij->a[j];
52: }
53: }
54: PetscFree(colcnt);
56: /* compute sum of squares of each column d[] */
57: for (i=0; i<cp->n; i++) { /* over columns */
58: cp->d[i] = 0.;
59: for (j=cp->i[i]; j<cp->i[i+1]; j++) cp->d[i] += cp->a[j]*cp->a[j]; /* over rows in column */
60: cp->d[i] = 1.0/cp->d[i];
61: }
62: return(0);
63: }
64: /* -------------------------------------------------------------------------- */
65: static PetscErrorCode PCApply_CP(PC pc,Vec bb,Vec xx)
66: {
67: PC_CP *cp = (PC_CP*)pc->data;
69: PetscScalar *b,*x,xt;
70: PetscInt i,j;
73: VecCopy(bb,cp->work);
74: VecGetArray(cp->work,&b);
75: VecGetArray(xx,&x);
77: for (i=0; i<cp->n; i++) { /* over columns */
78: xt = 0.;
79: for (j=cp->i[i]; j<cp->i[i+1]; j++) xt += cp->a[j]*b[cp->j[j]]; /* over rows in column */
80: xt *= cp->d[i];
81: x[i] = xt;
82: for (j=cp->i[i]; j<cp->i[i+1]; j++) b[cp->j[j]] -= xt*cp->a[j]; /* over rows in column updating b*/
83: }
84: for (i=cp->n-1; i>-1; i--) { /* over columns */
85: xt = 0.;
86: for (j=cp->i[i]; j<cp->i[i+1]; j++) xt += cp->a[j]*b[cp->j[j]]; /* over rows in column */
87: xt *= cp->d[i];
88: x[i] = xt;
89: for (j=cp->i[i]; j<cp->i[i+1]; j++) b[cp->j[j]] -= xt*cp->a[j]; /* over rows in column updating b*/
90: }
92: VecRestoreArray(cp->work,&b);
93: VecRestoreArray(xx,&x);
94: return(0);
95: }
96: /* -------------------------------------------------------------------------- */
97: static PetscErrorCode PCReset_CP(PC pc)
98: {
99: PC_CP *cp = (PC_CP*)pc->data;
103: PetscFree(cp->d);
104: VecDestroy(&cp->work);
105: PetscFree3(cp->a,cp->i,cp->j);
106: return(0);
107: }
109: static PetscErrorCode PCDestroy_CP(PC pc)
110: {
111: PC_CP *cp = (PC_CP*)pc->data;
115: PCReset_CP(pc);
116: PetscFree(cp->d);
117: PetscFree3(cp->a,cp->i,cp->j);
118: PetscFree(pc->data);
119: return(0);
120: }
/*
   PCSetFromOptions_CP - The CP preconditioner currently has no run-time
   options; this stub exists so the ops table entry is non-NULL.
*/
static PetscErrorCode PCSetFromOptions_CP(PetscOptionItems *PetscOptionsObject,PC pc)
{
  return(0);
}
128: /*MC
129: PCCP - a "column-projection" preconditioner
131: This is a terrible preconditioner and is not recommended, ever!
133: Loops over the entries of x computing dx_i (e_i is the unit vector in the ith direction) to
134: $
135: $ min || b - A(x + dx_i e_i) ||_2
136: $ dx_i
137: $
138: $ That is, it changes a single entry of x to minimize the new residual norm.
139: $ Let A_i represent the ith column of A, then the minimization can be written as
140: $
141: $ min || r - (dx_i) A e_i ||_2
142: $ dx_i
143: $ or min || r - (dx_i) A_i ||_2
144: $ dx_i
145: $
146: $ take the derivative with respect to dx_i to obtain
147: $ dx_i = (A_i^T A_i)^(-1) A_i^T r
148: $
149: $ This algorithm can be thought of as Gauss-Seidel on the normal equations
151: Notes:
152: This procedure can also be done with block columns or any groups of columns
153: but this is not coded.
155: These "projections" can be done simultaneously for all columns (similar to Jacobi)
156: or sequentially (similar to Gauss-Seidel/SOR). This is only coded for SOR type.
158: This is related to, but not the same as "row projection" methods.
160: This is currently coded only for SeqAIJ matrices in sequential (SOR) form.
162: Level: intermediate
164: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PCJACOBI, PCSOR
166: M*/
168: PETSC_EXTERN PetscErrorCode PCCreate_CP(PC pc)
169: {
170: PC_CP *cp;
174: PetscNewLog(pc,&cp);
175: pc->data = (void*)cp;
177: pc->ops->apply = PCApply_CP;
178: pc->ops->applytranspose = PCApply_CP;
179: pc->ops->setup = PCSetUp_CP;
180: pc->ops->reset = PCReset_CP;
181: pc->ops->destroy = PCDestroy_CP;
182: pc->ops->setfromoptions = PCSetFromOptions_CP;
183: pc->ops->view = NULL;
184: pc->ops->applyrichardson = NULL;
185: return(0);
186: }