2: #include <petsc-private/pcimpl.h> /*I "petscpc.h" I*/
3: #include <../src/mat/impls/aij/seq/aij.h>
/*
   Private context (data structure) for the CP preconditioner.

   PCSetUp_CP() converts the SeqAIJ (row-oriented) matrix into the
   compressed sparse COLUMN form held here, because the algorithm
   sweeps over columns of the matrix.
*/
typedef struct {
  PetscInt    n,m;    /* local number of columns (n) and rows (m); must be equal */
  Vec         work;   /* work vector: holds the running residual during PCApply_CP() */
  PetscScalar *d;     /* d[i] = 1/(sum of squares of column i), i.e. 1/(A_i^T A_i) */
  PetscScalar *a;     /* non-zeros by column */
  PetscInt    *i,*j;  /* offsets of nonzeros by column, non-zero indices by column */
} PC_CP;
19: static PetscErrorCode PCSetUp_CP(PC pc) 20: {
21: PC_CP *cp = (PC_CP*)pc->data;
22: PetscInt i,j,*colcnt;
24: PetscBool flg;
25: Mat_SeqAIJ *aij = (Mat_SeqAIJ*)pc->pmat->data;
28: PetscObjectTypeCompare((PetscObject)pc->pmat,MATSEQAIJ,&flg);
29: if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Currently only handles SeqAIJ matrices");
31: MatGetLocalSize(pc->pmat,&cp->m,&cp->n);
32: if (cp->m != cp->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Currently only for square matrices");
34: if (!cp->work) {MatGetVecs(pc->pmat,&cp->work,NULL);}
35: if (!cp->d) {PetscMalloc(cp->n*sizeof(PetscScalar),&cp->d);}
36: if (cp->a && pc->flag != SAME_NONZERO_PATTERN) {
37: PetscFree3(cp->a,cp->i,cp->j);
38: cp->a = 0;
39: }
41: /* convert to column format */
42: if (!cp->a) {
43: PetscMalloc3(aij->nz,PetscScalar,&cp->a,cp->n+1,PetscInt,&cp->i,aij->nz,PetscInt,&cp->j);
44: }
45: PetscMalloc(cp->n*sizeof(PetscInt),&colcnt);
46: PetscMemzero(colcnt,cp->n*sizeof(PetscInt));
48: for (i=0; i<aij->nz; i++) colcnt[aij->j[i]]++;
49: cp->i[0] = 0;
50: for (i=0; i<cp->n; i++) cp->i[i+1] = cp->i[i] + colcnt[i];
51: PetscMemzero(colcnt,cp->n*sizeof(PetscInt));
52: for (i=0; i<cp->m; i++) { /* over rows */
53: for (j=aij->i[i]; j<aij->i[i+1]; j++) { /* over columns in row */
54: cp->j[cp->i[aij->j[j]]+colcnt[aij->j[j]]] = i;
55: cp->a[cp->i[aij->j[j]]+colcnt[aij->j[j]]++] = aij->a[j];
56: }
57: }
58: PetscFree(colcnt);
60: /* compute sum of squares of each column d[] */
61: for (i=0; i<cp->n; i++) { /* over columns */
62: cp->d[i] = 0.;
63: for (j=cp->i[i]; j<cp->i[i+1]; j++) cp->d[i] += cp->a[j]*cp->a[j]; /* over rows in column */
64: cp->d[i] = 1.0/cp->d[i];
65: }
66: return(0);
67: }
68: /* -------------------------------------------------------------------------- */
71: static PetscErrorCode PCApply_CP(PC pc,Vec bb,Vec xx) 72: {
73: PC_CP *cp = (PC_CP*)pc->data;
75: PetscScalar *b,*x,xt;
76: PetscInt i,j;
79: VecCopy(bb,cp->work);
80: VecGetArray(cp->work,&b);
81: VecGetArray(xx,&x);
83: for (i=0; i<cp->n; i++) { /* over columns */
84: xt = 0.;
85: for (j=cp->i[i]; j<cp->i[i+1]; j++) xt += cp->a[j]*b[cp->j[j]]; /* over rows in column */
86: xt *= cp->d[i];
87: x[i] = xt;
88: for (j=cp->i[i]; j<cp->i[i+1]; j++) b[cp->j[j]] -= xt*cp->a[j]; /* over rows in column updating b*/
89: }
90: for (i=cp->n-1; i>-1; i--) { /* over columns */
91: xt = 0.;
92: for (j=cp->i[i]; j<cp->i[i+1]; j++) xt += cp->a[j]*b[cp->j[j]]; /* over rows in column */
93: xt *= cp->d[i];
94: x[i] = xt;
95: for (j=cp->i[i]; j<cp->i[i+1]; j++) b[cp->j[j]] -= xt*cp->a[j]; /* over rows in column updating b*/
96: }
98: VecRestoreArray(cp->work,&b);
99: VecRestoreArray(xx,&x);
100: return(0);
101: }
102: /* -------------------------------------------------------------------------- */
105: static PetscErrorCode PCReset_CP(PC pc)106: {
107: PC_CP *cp = (PC_CP*)pc->data;
111: PetscFree(cp->d);
112: VecDestroy(&cp->work);
113: PetscFree3(cp->a,cp->i,cp->j);
114: return(0);
115: }
119: static PetscErrorCode PCDestroy_CP(PC pc)120: {
121: PC_CP *cp = (PC_CP*)pc->data;
125: PCReset_CP(pc);
126: PetscFree(cp->d);
127: PetscFree3(cp->a,cp->i,cp->j);
128: PetscFree(pc->data);
129: return(0);
130: }
/*
   PCSetFromOptions_CP - the CP preconditioner has no runtime options,
   so this is intentionally a no-op.
*/
static PetscErrorCode PCSetFromOptions_CP(PC pc)
{
  return(0);
}
/*MC
     PCCP - a "column-projection" preconditioner

      This is a terrible preconditioner and is not recommended, ever!

      Loops over the entries of x computing dx_i to
$
$        min || b - A(x + dx_i e_i) ||_2
$        dx_i
$
$    That is, it changes a single entry of x to minimize the new residual.
$   Let A_i represent the ith column of A, then the minimization can be written as
$
$       min || r - (dx_i) A e_i ||_2
$       dx_i
$   or  min || r - (dx_i) A_i ||_2
$       dx_i
$
$    take the derivative with respect to dx_i to obtain
$        dx_i = (A_i^T A_i)^(-1) A_i^T r
$
$    This algorithm can be thought of as Gauss-Seidel on the normal equations

    Notes: This procedure can also be done with block columns or any groups of columns
        but this is not coded.

      These "projections" can be done simultaneously for all columns (similar to Jacobi)
   or sequentially (similar to Gauss-Seidel/SOR). This is only coded for SOR type.

   This is related to, but not the same as "row projection" methods.

   This is currently coded only for SeqAIJ matrices in sequential (SOR) form.

  Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCJACOBI, PCSOR

M*/
182: PETSC_EXTERN PetscErrorCode PCCreate_CP(PC pc)183: {
184: PC_CP *cp;
188: PetscNewLog(pc,PC_CP,&cp);
189: pc->data = (void*)cp;
191: pc->ops->apply = PCApply_CP;
192: pc->ops->applytranspose = PCApply_CP;
193: pc->ops->setup = PCSetUp_CP;
194: pc->ops->reset = PCReset_CP;
195: pc->ops->destroy = PCDestroy_CP;
196: pc->ops->setfromoptions = PCSetFromOptions_CP;
197: pc->ops->view = 0;
198: pc->ops->applyrichardson = 0;
199: return(0);
200: }