Actual source code: agg.c
petsc-3.11.4 2019-09-28
/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
*/
5: #include <../src/ksp/pc/impls/gamg/gamg.h>
6: /* Next line needed to deactivate KSP_Solve logging */
7: #include <petsc/private/kspimpl.h>
8: #include <petscblaslapack.h>
9: #include <petscdm.h>
/* Private context for the smoothed-aggregation ("agg") GAMG variant */
typedef struct {
  PetscInt  nsmooths;     /* number of prolongator smoothing steps (see -pc_gamg_agg_nsmooths) */
  PetscBool sym_graph;    /* symmetrize the graph before aggregation (see -pc_gamg_sym_graph) */
  PetscInt  square_graph; /* number of levels on which the graph is squared before aggregating */
} PC_GAMG_AGG;
/*@
   PCGAMGSetNSmooths - Set number of smoothing steps (1 is typical)

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of smoothing steps

   Options Database Key:
.  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smooth aggregation

   Level: intermediate

   Concepts: Aggregation AMG preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n)
{
  /* dispatch to the type-specific implementation (no-op if pc is not GAMG-AGG) */
  PetscTryMethod(pc,"PCGAMGSetNSmooths_C",(PC,PetscInt),(pc,n));
  return(0);
}
44: static PetscErrorCode PCGAMGSetNSmooths_AGG(PC pc, PetscInt n)
45: {
46: PC_MG *mg = (PC_MG*)pc->data;
47: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
48: PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
51: pc_gamg_agg->nsmooths = n;
52: return(0);
53: }
/*@
   PCGAMGSetSymGraph - Symmetrize the graph before computing the aggregation. Some algorithms require the graph be symmetric

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation

   Level: intermediate

   Concepts: Aggregation AMG preconditioner

.seealso: PCGAMGSetSquareGraph()
@*/
PetscErrorCode PCGAMGSetSymGraph(PC pc, PetscBool n)
{
  /* dispatch to the type-specific implementation (no-op if pc is not GAMG-AGG) */
  PetscTryMethod(pc,"PCGAMGSetSymGraph_C",(PC,PetscBool),(pc,n));
  return(0);
}
83: static PetscErrorCode PCGAMGSetSymGraph_AGG(PC pc, PetscBool n)
84: {
85: PC_MG *mg = (PC_MG*)pc->data;
86: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
87: PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
90: pc_gamg_agg->sym_graph = n;
91: return(0);
92: }
/*@
   PCGAMGSetSquareGraph - Square the graph, ie. compute A'*A before aggregating it

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of levels on which to square the graph before aggregating it

   Options Database Key:
.  -pc_gamg_square_graph <n,default = 1> - number of levels to square the graph on before aggregating it

   Notes:
   Squaring the graph increases the rate of coarsening (aggressive coarsening) and thereby reduces the complexity of the coarse grids, and generally results in slower solver convergence rates. Reducing coarse grid complexity reduces the complexity of Galerkin coarse grid construction considerably.

   Level: intermediate

   Concepts: Aggregation AMG preconditioner

.seealso: PCGAMGSetSymGraph(), PCGAMGSetThreshold()
@*/
PetscErrorCode PCGAMGSetSquareGraph(PC pc, PetscInt n)
{
  /* dispatch to the type-specific implementation (no-op if pc is not GAMG-AGG) */
  PetscTryMethod(pc,"PCGAMGSetSquareGraph_C",(PC,PetscInt),(pc,n));
  return(0);
}
125: static PetscErrorCode PCGAMGSetSquareGraph_AGG(PC pc, PetscInt n)
126: {
127: PC_MG *mg = (PC_MG*)pc->data;
128: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
129: PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
132: pc_gamg_agg->square_graph = n;
133: return(0);
134: }
/* Process the AGG-specific options; defaults come from the current subcontext values */
static PetscErrorCode PCSetFromOptions_GAMG_AGG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_MG       *mg          = (PC_MG*)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;

  PetscOptionsHead(PetscOptionsObject,"GAMG-AGG options");
  {
    /* each option reads and overwrites the corresponding subcontext field in place */
    PetscOptionsInt("-pc_gamg_agg_nsmooths","smoothing steps for smoothed aggregation, usually 1","PCGAMGSetNSmooths",pc_gamg_agg->nsmooths,&pc_gamg_agg->nsmooths,NULL);
    PetscOptionsBool("-pc_gamg_sym_graph","Set for asymmetric matrices","PCGAMGSetSymGraph",pc_gamg_agg->sym_graph,&pc_gamg_agg->sym_graph,NULL);
    PetscOptionsInt("-pc_gamg_square_graph","Number of levels to square graph for faster coarsening and lower coarse grid complexity","PCGAMGSetSquareGraph",pc_gamg_agg->square_graph,&pc_gamg_agg->square_graph,NULL);
  }
  PetscOptionsTail();
  return(0);
}
154: /* -------------------------------------------------------------------------- */
/* Free the AGG subcontext and remove the composed coordinate-setting method */
static PetscErrorCode PCDestroy_GAMG_AGG(PC pc)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFree(pc_gamg->subctx);
  PetscObjectComposeFunction((PetscObject)pc,"PCSetCoordinates_C",NULL);
  return(0);
}
167: /* -------------------------------------------------------------------------- */
168: /*
169: PCSetCoordinates_AGG
170: - collective
172: Input Parameter:
173: . pc - the preconditioner context
174: . ndm - dimesion of data (used for dof/vertex for Stokes)
175: . a_nloc - number of vertices local
176: . coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
177: */
179: static PetscErrorCode PCSetCoordinates_AGG(PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords)
180: {
181: PC_MG *mg = (PC_MG*)pc->data;
182: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
184: PetscInt arrsz,kk,ii,jj,nloc,ndatarows,ndf;
185: Mat mat = pc->pmat;
190: nloc = a_nloc;
192: /* SA: null space vectors */
193: MatGetBlockSize(mat, &ndf); /* this does not work for Stokes */
194: if (coords && ndf==1) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
195: else if (coords) {
196: if (ndm > ndf) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"degrees of motion %D > block size %D",ndm,ndf);
197: pc_gamg->data_cell_cols = (ndm==2 ? 3 : 6); /* displacement elasticity */
198: if (ndm != ndf) {
199: if (pc_gamg->data_cell_cols != ndf) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Don't know how to create null space for ndm=%D, ndf=%D. Use MatSetNearNullSpace.",ndm,ndf);
200: }
201: } else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
202: pc_gamg->data_cell_rows = ndatarows = ndf;
203: if (pc_gamg->data_cell_cols <= 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_cell_cols %D <= 0",pc_gamg->data_cell_cols);
204: arrsz = nloc*pc_gamg->data_cell_rows*pc_gamg->data_cell_cols;
206: /* create data - syntactic sugar that should be refactored at some point */
207: if (!pc_gamg->data || (pc_gamg->data_sz != arrsz)) {
208: PetscFree(pc_gamg->data);
209: PetscMalloc1(arrsz+1, &pc_gamg->data);
210: }
211: /* copy data in - column oriented */
212: for (kk=0; kk<nloc; kk++) {
213: const PetscInt M = nloc*pc_gamg->data_cell_rows; /* stride into data */
214: PetscReal *data = &pc_gamg->data[kk*ndatarows]; /* start of cell */
215: if (pc_gamg->data_cell_cols==1) *data = 1.0;
216: else {
217: /* translational modes */
218: for (ii=0;ii<ndatarows;ii++) {
219: for (jj=0;jj<ndatarows;jj++) {
220: if (ii==jj)data[ii*M + jj] = 1.0;
221: else data[ii*M + jj] = 0.0;
222: }
223: }
225: /* rotational modes */
226: if (coords) {
227: if (ndm == 2) {
228: data += 2*M;
229: data[0] = -coords[2*kk+1];
230: data[1] = coords[2*kk];
231: } else {
232: data += 3*M;
233: data[0] = 0.0; data[M+0] = coords[3*kk+2]; data[2*M+0] = -coords[3*kk+1];
234: data[1] = -coords[3*kk+2]; data[M+1] = 0.0; data[2*M+1] = coords[3*kk];
235: data[2] = coords[3*kk+1]; data[M+2] = -coords[3*kk]; data[2*M+2] = 0.0;
236: }
237: }
238: }
239: }
241: pc_gamg->data_sz = arrsz;
242: return(0);
243: }
245: typedef PetscInt NState;
246: static const NState NOT_DONE=-2;
247: static const NState DELETED =-1;
248: static const NState REMOVED =-3;
249: #define IS_SELECTED(s) (s!=DELETED && s!=NOT_DONE && s!=REMOVED)
/* -------------------------------------------------------------------------- */
/*
   smoothAggs - greedy grab with G1 (unsquared graph) -- AIJ specific
     - AGG-MG specific: clears singletons out of 'selected_2'

   Moves deleted vertices between aggregates using the unsquared graph so each
   aggregate is connected in G1, then reconciles the result across MPI ranks
   via ghost-value scatters.

   Input Parameter:
   . Gmat_2 - global matrix of graph (data not defined), base (squared) graph
   . Gmat_1 - base graph to grab with
   Input/Output Parameter:
   . aggs_2 - linked list of aggs with gids
*/
static PetscErrorCode smoothAggs(PC pc,Mat Gmat_2, Mat Gmat_1,PetscCoarsenData *aggs_2)
{
  PetscBool      isMPI;
  Mat_SeqAIJ     *matA_1, *matB_1=0;
  MPI_Comm       comm;
  PetscInt       lid,*ii,*idx,ix,Iend,my0,kk,n,j;
  Mat_MPIAIJ     *mpimat_2 = 0, *mpimat_1=0;
  const PetscInt nloc = Gmat_2->rmap->n;
  PetscScalar    *cpcol_1_state,*cpcol_2_state,*cpcol_2_par_orig,*lid_parent_gid;
  PetscInt       *lid_cprowID_1;
  NState         *lid_state;
  Vec            ghost_par_orig2;

  PetscObjectGetComm((PetscObject)Gmat_2,&comm);
  MatGetOwnershipRange(Gmat_1,&my0,&Iend);

  /* get submatrices */
  PetscStrbeginswith(((PetscObject)Gmat_1)->type_name,MATMPIAIJ,&isMPI);
  if (isMPI) {
    /* grab matrix objects */
    mpimat_2 = (Mat_MPIAIJ*)Gmat_2->data;
    mpimat_1 = (Mat_MPIAIJ*)Gmat_1->data;
    matA_1   = (Mat_SeqAIJ*)mpimat_1->A->data;
    matB_1   = (Mat_SeqAIJ*)mpimat_1->B->data;

    /* force compressed row storage for B matrix in AuxMat */
    MatCheckCompressedRow(mpimat_1->B,matB_1->nonzerorowcnt,&matB_1->compressedrow,matB_1->i,Gmat_1->rmap->n,-1.0);

    /* map each local row to its compressed off-diagonal row index (-1 = no off-proc entries) */
    PetscMalloc1(nloc, &lid_cprowID_1);
    for (lid = 0; lid < nloc; lid++) lid_cprowID_1[lid] = -1;
    for (ix=0; ix<matB_1->compressedrow.nrows; ix++) {
      PetscInt lid = matB_1->compressedrow.rindex[ix];
      lid_cprowID_1[lid] = ix;
    }
  } else {
    PetscBool isAIJ;
    PetscStrbeginswith(((PetscObject)Gmat_1)->type_name,MATSEQAIJ,&isAIJ);
    if (!isAIJ) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Require AIJ matrix.");
    matA_1        = (Mat_SeqAIJ*)Gmat_1->data;
    lid_cprowID_1 = NULL;
  }
  if (nloc>0) {
    if (matB_1 && !matB_1->compressedrow.use) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"matB_1 && !matB_1->compressedrow.use: PETSc bug???");
  }
  /* get state of locals and selected gid for deleted */
  PetscMalloc2(nloc, &lid_state,nloc, &lid_parent_gid);
  for (lid = 0; lid < nloc; lid++) {
    lid_parent_gid[lid] = -1.0; /* -1 = no parent known yet */
    lid_state[lid]      = DELETED;
  }

  /* set lid_state: a non-empty aggregate list means this lid is selected (state = its gid) */
  for (lid = 0; lid < nloc; lid++) {
    PetscCDIntNd *pos;
    PetscCDGetHeadPos(aggs_2,lid,&pos);
    if (pos) {
      PetscInt gid1;

      PetscCDIntNdGetID(pos, &gid1);
      if (gid1 != lid+my0) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"gid1 %D != lid %D + my0 %D",gid1,lid,my0);
      lid_state[lid] = gid1;
    }
  }

  /* map local to selected local, DELETED means a ghost owns it */
  for (lid=kk=0; lid<nloc; lid++) {
    NState state = lid_state[lid];
    if (IS_SELECTED(state)) {
      PetscCDIntNd *pos;
      PetscCDGetHeadPos(aggs_2,lid,&pos);
      while (pos) {
        PetscInt gid1;
        PetscCDIntNdGetID(pos, &gid1);
        PetscCDGetNextPos(aggs_2,lid,&pos);

        if (gid1 >= my0 && gid1 < Iend) lid_parent_gid[gid1-my0] = (PetscScalar)(lid + my0);
      }
    }
  }
  /* get 'cpcol_1/2_state' & cpcol_2_par_orig - uses mpimat_1/2->lvec for temp space */
  if (isMPI) {
    Vec tempVec;
    /* get 'cpcol_1_state' */
    MatCreateVecs(Gmat_1, &tempVec, 0);
    for (kk=0,j=my0; kk<nloc; kk++,j++) {
      PetscScalar v = (PetscScalar)lid_state[kk];
      VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES);
    }
    VecAssemblyBegin(tempVec);
    VecAssemblyEnd(tempVec);
    VecScatterBegin(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(mpimat_1->Mvctx,tempVec, mpimat_1->lvec,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(mpimat_1->lvec, &cpcol_1_state);
    /* get 'cpcol_2_state' */
    VecScatterBegin(mpimat_2->Mvctx,tempVec, mpimat_2->lvec,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(mpimat_2->Mvctx,tempVec, mpimat_2->lvec,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(mpimat_2->lvec, &cpcol_2_state);
    /* get 'cpcol_2_par_orig' */
    for (kk=0,j=my0; kk<nloc; kk++,j++) {
      PetscScalar v = (PetscScalar)lid_parent_gid[kk];
      VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES);
    }
    VecAssemblyBegin(tempVec);
    VecAssemblyEnd(tempVec);
    VecDuplicate(mpimat_2->lvec, &ghost_par_orig2);
    VecScatterBegin(mpimat_2->Mvctx,tempVec, ghost_par_orig2,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(mpimat_2->Mvctx,tempVec, ghost_par_orig2,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(ghost_par_orig2, &cpcol_2_par_orig);

    VecDestroy(&tempVec);
  } /* ismpi */

  /* doit: walk every local vertex and steal/release aggregate members */
  for (lid=0; lid<nloc; lid++) {
    NState state = lid_state[lid];
    if (IS_SELECTED(state)) {
      /* steal locals: selected vertex claims its G1 neighbors from other aggregates */
      ii  = matA_1->i; n = ii[lid+1] - ii[lid];
      idx = matA_1->j + ii[lid];
      for (j=0; j<n; j++) {
        PetscInt lidj   = idx[j], sgid;
        NState   statej = lid_state[lidj];
        if (statej==DELETED && (sgid=(PetscInt)PetscRealPart(lid_parent_gid[lidj])) != lid+my0) { /* steal local */
          lid_parent_gid[lidj] = (PetscScalar)(lid+my0); /* send this if sgid is not local */
          if (sgid >= my0 && sgid < Iend) { /* I'm stealing this local from a local sgid */
            PetscInt     hav=0,slid=sgid-my0,gidj=lidj+my0;
            PetscCDIntNd *pos,*last=NULL;
            /* looking for local from local so id_llist_2 works */
            PetscCDGetHeadPos(aggs_2,slid,&pos);
            while (pos) {
              PetscInt gid;
              PetscCDIntNdGetID(pos, &gid);
              if (gid == gidj) {
                if (!last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"last cannot be null");
                PetscCDRemoveNextNode(aggs_2, slid, last);
                PetscCDAppendNode(aggs_2, lid, pos);
                hav  = 1;
                break;
              } else last = pos;

              PetscCDGetNextPos(aggs_2,slid,&pos);
            }
            if (hav!=1) {
              if (!hav) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"failed to find adj in 'selected' lists - structurally unsymmetric matrix");
              SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"found node %D times???",hav);
            }
          } else { /* I'm stealing this local, owned by a ghost */
            if (sgid != -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Have un-symmetric graph (apparently). Use '-%spc_gamg_sym_graph true' to symetrize the graph or '-%spc_gamg_threshold -1' if the matrix is structurally symmetric.",((PetscObject)pc)->prefix,((PetscObject)pc)->prefix);
            PetscCDAppendID(aggs_2, lid, lidj+my0);
          }
        }
      } /* local neighbors */
    } else if (state == DELETED && lid_cprowID_1) {
      PetscInt sgidold = (PetscInt)PetscRealPart(lid_parent_gid[lid]);
      /* see if I have a selected ghost neighbor that will steal me */
      if ((ix=lid_cprowID_1[lid]) != -1) {
        ii  = matB_1->compressedrow.i; n = ii[ix+1] - ii[ix];
        idx = matB_1->j + ii[ix];
        for (j=0; j<n; j++) {
          PetscInt cpid   = idx[j];
          NState   statej = (NState)PetscRealPart(cpcol_1_state[cpid]);
          if (IS_SELECTED(statej) && sgidold != (PetscInt)statej) { /* ghost will steal this, remove from my list */
            lid_parent_gid[lid] = (PetscScalar)statej; /* send who selected */
            if (sgidold>=my0 && sgidold<Iend) { /* this was mine */
              PetscInt     hav=0,oldslidj=sgidold-my0;
              PetscCDIntNd *pos,*last=NULL;
              /* remove from 'oldslidj' list */
              PetscCDGetHeadPos(aggs_2,oldslidj,&pos);
              while (pos) {
                PetscInt gid;
                PetscCDIntNdGetID(pos, &gid);
                if (lid+my0 == gid) {
                  /* id_llist_2[lastid] = id_llist_2[flid];   /\* remove lid from oldslidj list *\/ */
                  if (!last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"last cannot be null");
                  PetscCDRemoveNextNode(aggs_2, oldslidj, last);
                  /* ghost (PetscScalar)statej will add this later */
                  hav = 1;
                  break;
                } else last = pos;

                PetscCDGetNextPos(aggs_2,oldslidj,&pos);
              }
              if (hav!=1) {
                if (!hav) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"failed to find adj in 'selected' lists - structurally unsymmetric matrix");
                SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"found node %D times???",hav);
              }
            } else {
              /* ghosts remove this later */
            }
          }
        }
      }
    } /* selected/deleted */
  } /* node loop */

  if (isMPI) {
    PetscScalar     *cpcol_2_parent,*cpcol_2_gid;
    Vec             tempVec,ghostgids2,ghostparents2;
    PetscInt        cpid,nghost_2;
    PCGAMGHashTable gid_cpid;

    VecGetSize(mpimat_2->lvec, &nghost_2);
    MatCreateVecs(Gmat_2, &tempVec, 0);

    /* get 'cpcol_2_parent' */
    for (kk=0,j=my0; kk<nloc; kk++,j++) {
      VecSetValues(tempVec, 1, &j, &lid_parent_gid[kk], INSERT_VALUES);
    }
    VecAssemblyBegin(tempVec);
    VecAssemblyEnd(tempVec);
    VecDuplicate(mpimat_2->lvec, &ghostparents2);
    VecScatterBegin(mpimat_2->Mvctx,tempVec, ghostparents2,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(mpimat_2->Mvctx,tempVec, ghostparents2,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(ghostparents2, &cpcol_2_parent);

    /* get 'cpcol_2_gid' */
    for (kk=0,j=my0; kk<nloc; kk++,j++) {
      PetscScalar v = (PetscScalar)j;
      VecSetValues(tempVec, 1, &j, &v, INSERT_VALUES);
    }
    VecAssemblyBegin(tempVec);
    VecAssemblyEnd(tempVec);
    VecDuplicate(mpimat_2->lvec, &ghostgids2);
    VecScatterBegin(mpimat_2->Mvctx,tempVec, ghostgids2,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(mpimat_2->Mvctx,tempVec, ghostgids2,INSERT_VALUES,SCATTER_FORWARD);
    VecGetArray(ghostgids2, &cpcol_2_gid);
    VecDestroy(&tempVec);

    /* look for deleted ghosts and add to table */
    PCGAMGHashTableCreate(2*nghost_2+1, &gid_cpid);
    for (cpid = 0; cpid < nghost_2; cpid++) {
      NState state = (NState)PetscRealPart(cpcol_2_state[cpid]);
      if (state==DELETED) {
        PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);
        PetscInt sgid_old = (PetscInt)PetscRealPart(cpcol_2_par_orig[cpid]);
        if (sgid_old == -1 && sgid_new != -1) {
          PetscInt gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);
          PCGAMGHashTableAdd(&gid_cpid, gid, cpid);
        }
      }
    }

    /* look for deleted ghosts and see if they moved - remove it */
    for (lid=0; lid<nloc; lid++) {
      NState state = lid_state[lid];
      if (IS_SELECTED(state)) {
        PetscCDIntNd *pos,*last=NULL;
        /* look for deleted ghosts and see if they moved */
        PetscCDGetHeadPos(aggs_2,lid,&pos);
        while (pos) {
          PetscInt gid;
          PetscCDIntNdGetID(pos, &gid);

          if (gid < my0 || gid >= Iend) {
            PCGAMGHashTableFind(&gid_cpid, gid, &cpid);
            if (cpid != -1) {
              /* a moved ghost - */
              /* id_llist_2[lastid] = id_llist_2[flid];    /\* remove 'flid' from list *\/ */
              PetscCDRemoveNextNode(aggs_2, lid, last);
            } else last = pos;
          } else last = pos;

          PetscCDGetNextPos(aggs_2,lid,&pos);
        } /* loop over list of deleted */
      } /* selected */
    }
    PCGAMGHashTableDestroy(&gid_cpid);

    /* look at ghosts, see if they changed - and it */
    for (cpid = 0; cpid < nghost_2; cpid++) {
      PetscInt sgid_new = (PetscInt)PetscRealPart(cpcol_2_parent[cpid]);
      if (sgid_new >= my0 && sgid_new < Iend) { /* this is mine */
        PetscInt     gid = (PetscInt)PetscRealPart(cpcol_2_gid[cpid]);
        PetscInt     slid_new=sgid_new-my0,hav=0;
        PetscCDIntNd *pos;

        /* search for this gid to see if I have it */
        PetscCDGetHeadPos(aggs_2,slid_new,&pos);
        while (pos) {
          PetscInt gidj;
          PetscCDIntNdGetID(pos, &gidj);
          PetscCDGetNextPos(aggs_2,slid_new,&pos);

          if (gidj == gid) { hav = 1; break; }
        }
        if (hav != 1) {
          /* insert 'flidj' into head of llist */
          PetscCDAppendID(aggs_2, slid_new, gid);
        }
      }
    }

    VecRestoreArray(mpimat_1->lvec, &cpcol_1_state);
    VecRestoreArray(mpimat_2->lvec, &cpcol_2_state);
    VecRestoreArray(ghostparents2, &cpcol_2_parent);
    VecRestoreArray(ghostgids2, &cpcol_2_gid);
    PetscFree(lid_cprowID_1);
    VecDestroy(&ghostgids2);
    VecDestroy(&ghostparents2);
    VecDestroy(&ghost_par_orig2);
  }

  PetscFree2(lid_state,lid_parent_gid);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCSetData_AGG - called if data is not set with PCSetCoordinates.
      Looks in Mat for near null space.
      Does not work for Stokes

   Search order: MatGetNearNullSpace, then a DM field object's composed
   "nearnullspace"/"nullspace"; if nothing is found, fall back to constant
   null-space vectors via PCSetCoordinates_AGG with NULL coordinates.

  Input Parameter:
   . pc -
   . a_A - matrix to get (near) null space out of.
*/
static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A)
{
  PC_MG        *mg      = (PC_MG*)pc->data;
  PC_GAMG      *pc_gamg = (PC_GAMG*)mg->innerctx;
  MatNullSpace mnull;

  MatGetNearNullSpace(a_A, &mnull);
  if (!mnull) {
    DM dm;
    PCGetDM(pc, &dm);
    if (!dm) {
      MatGetDM(a_A, &dm);
    }
    if (dm) {
      PetscObject deformation;
      PetscInt    Nf;

      DMGetNumFields(dm, &Nf);
      if (Nf) {
        /* only the first field is inspected for an attached null space */
        DMGetField(dm, 0, NULL, &deformation);
        PetscObjectQuery((PetscObject)deformation,"nearnullspace",(PetscObject*)&mnull);
        if (!mnull) {
          PetscObjectQuery((PetscObject)deformation,"nullspace",(PetscObject*)&mnull);
        }
      }
    }
  }

  if (!mnull) {
    PetscInt bs,NN,MM;
    MatGetBlockSize(a_A, &bs);
    MatGetLocalSize(a_A, &MM, &NN);
    if (MM % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MM %D must be divisible by bs %D",MM,bs);
    PCSetCoordinates_AGG(pc, bs, MM/bs, NULL); /* NULL coords -> constant null-space vectors only */
  } else {
    PetscReal *nullvec;
    PetscBool has_const;
    PetscInt  i,j,mlocal,nvec,bs;
    const Vec *vecs; const PetscScalar *v;

    MatGetLocalSize(a_A,&mlocal,NULL);
    MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs);
    /* pack the null-space vectors (plus an optional constant vector) column-wise */
    pc_gamg->data_sz = (nvec+!!has_const)*mlocal;
    PetscMalloc1((nvec+!!has_const)*mlocal,&nullvec);
    if (has_const) for (i=0; i<mlocal; i++) nullvec[i] = 1.0;
    for (i=0; i<nvec; i++) {
      VecGetArrayRead(vecs[i],&v);
      for (j=0; j<mlocal; j++) nullvec[(i+!!has_const)*mlocal + j] = PetscRealPart(v[j]);
      VecRestoreArrayRead(vecs[i],&v);
    }
    pc_gamg->data           = nullvec; /* NOTE(review): any previous pc_gamg->data is not freed here — verify ownership upstream */
    pc_gamg->data_cell_cols = (nvec+!!has_const);
    MatGetBlockSize(a_A, &bs);
    pc_gamg->data_cell_rows = bs;
  }
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
 formProl0

   Builds the initial (tentative) prolongator: for each aggregate, the local
   block of null-space vectors is QR-factored (LAPACK geqrf/orgqr); Q becomes
   the aggregate's column block of P0 and R becomes the coarse-grid data.

   Input Parameter:
   . agg_llists - list of arrays with aggregates -- list from selected vertices of aggregate unselected vertices
   . bs - row block size
   . nSAvec - column bs of new P
   . my0crs - global index of start of locals
   . data_stride - bs*(nloc nodes + ghost nodes) [data_stride][nSAvec]
   . data_in[data_stride*nSAvec] - local data on fine grid
   . flid_fgid[data_stride/bs] - map local to global IDs, includes ghosts in 'locals_llist'
  Output Parameter:
   . a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
   . a_Prol - prolongation operator
*/
static PetscErrorCode formProl0(PetscCoarsenData *agg_llists,PetscInt bs,PetscInt nSAvec,PetscInt my0crs,PetscInt data_stride,PetscReal data_in[],const PetscInt flid_fgid[],PetscReal **a_data_out,Mat a_Prol)
{
  PetscErrorCode  ierr;
  PetscInt        Istart,my0,Iend,nloc,clid,flid = 0,aggID,kk,jj,ii,mm,ndone,nSelected,minsz,nghosts,out_data_stride;
  MPI_Comm        comm;
  PetscMPIInt     rank;
  PetscReal       *out_data;
  PetscCDIntNd    *pos;
  PCGAMGHashTable fgid_flid;

  PetscObjectGetComm((PetscObject)a_Prol,&comm);
  MPI_Comm_rank(comm,&rank);
  MatGetOwnershipRange(a_Prol, &Istart, &Iend);
  nloc = (Iend-Istart)/bs; my0 = Istart/bs;
  if ((Iend-Istart) % bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Iend %D - Istart %D must be divisible by bs %D",Iend,Istart,bs);
  Iend /= bs;
  nghosts = data_stride/bs - nloc;

  /* hash: ghost fine gid -> local index, used to resolve off-process aggregate members */
  PCGAMGHashTableCreate(2*nghosts+1, &fgid_flid);
  for (kk=0; kk<nghosts; kk++) {
    PCGAMGHashTableAdd(&fgid_flid, flid_fgid[nloc+kk], nloc+kk);
  }

  /* count selected -- same as number of cols of P */
  for (nSelected=mm=0; mm<nloc; mm++) {
    PetscBool ise;
    PetscCDEmptyAt(agg_llists, mm, &ise);
    if (!ise) nSelected++;
  }
  MatGetOwnershipRangeColumn(a_Prol, &ii, &jj);
  if ((ii/nSAvec) != my0crs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D /nSAvec %D != my0crs %D",ii,nSAvec,my0crs);
  if (nSelected != (jj-ii)/nSAvec) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nSelected %D != (jj %D - ii %D)/nSAvec %D",nSelected,jj,ii,nSAvec);

  /* aloc space for coarse point data (output) */
  out_data_stride = nSelected*nSAvec;

  PetscMalloc1(out_data_stride*nSAvec, &out_data);
  /* PETSC_MAX_REAL sentinel marks slots not yet written; checked below */
  for (ii=0;ii<out_data_stride*nSAvec;ii++) out_data[ii]=PETSC_MAX_REAL;
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation */
  minsz = 100;
  ndone = 0;
  for (mm = clid = 0; mm < nloc; mm++) {
    PetscCDSizeAt(agg_llists, mm, &jj);
    if (jj > 0) {
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt       cids[100]; /* max bs */
      PetscBLASInt   asz  =jj,M=asz*bs,N=nSAvec,INFO;
      PetscBLASInt   Mdata=M+((N-M>0) ? N-M : 0),LDA=Mdata,LWORK=N*bs; /* pad rows so Mdata >= N for LAPACK */
      PetscScalar    *qqc,*qqr,*TAU,*WORK;
      PetscInt       *fids;
      PetscReal      *data;

      /* count agg */
      if (asz<minsz) minsz = asz;

      /* get block */
      PetscMalloc5(Mdata*N, &qqc,M*N, &qqr,N, &TAU,LWORK, &WORK,M, &fids);

      aggID = 0;
      PetscCDGetHeadPos(agg_llists,lid,&pos);
      while (pos) {
        PetscInt gid1;
        PetscCDIntNdGetID(pos, &gid1);
        PetscCDGetNextPos(agg_llists,lid,&pos);

        if (gid1 >= my0 && gid1 < Iend) flid = gid1 - my0;
        else {
          PCGAMGHashTableFind(&fgid_flid, gid1, &flid);
          if (flid < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cannot find gid1 in table");
        }
        /* copy in B_i matrix - column oriented */
        data = &data_in[flid*bs];
        for (ii = 0; ii < bs; ii++) {
          for (jj = 0; jj < N; jj++) {
            PetscReal d = data[jj*data_stride + ii];
            qqc[jj*Mdata + aggID*bs + ii] = d;
          }
        }
        /* set fine IDs */
        for (kk=0; kk<bs; kk++) fids[aggID*bs + kk] = flid_fgid[flid]*bs + kk;
        aggID++;
      }

      /* pad with zeros */
      for (ii = asz*bs; ii < Mdata; ii++) {
        for (jj = 0; jj < N; jj++, kk++) {
          qqc[jj*Mdata + ii] = .0;
        }
      }

      ndone += aggID;
      /* QR factorization of the aggregate's null-space block */
      PetscFPTrapPush(PETSC_FP_TRAP_OFF);
      PetscStackCallBLAS("LAPACKgeqrf",LAPACKgeqrf_(&Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscFPTrapPop();
      if (INFO != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"xGEQRF error");
      /* get R - column oriented - output B_{i+1} (coarse-grid null-space data) */
      {
        PetscReal *data = &out_data[clid*nSAvec];
        for (jj = 0; jj < nSAvec; jj++) {
          for (ii = 0; ii < nSAvec; ii++) {
            if (data[jj*out_data_stride + ii] != PETSC_MAX_REAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"data[jj*out_data_stride + ii] != %e",PETSC_MAX_REAL);
            if (ii <= jj) data[jj*out_data_stride + ii] = PetscRealPart(qqc[jj*Mdata + ii]);
            else data[jj*out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row oriented */
      PetscStackCallBLAS("LAPACKorgqr",LAPACKorgqr_(&Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      if (INFO != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"xORGQR error arg %d",-INFO);

      /* transpose column-major Q into row-major qqr for MatSetValues */
      for (ii = 0; ii < M; ii++) {
        for (jj = 0; jj < N; jj++) {
          qqr[N*ii + jj] = qqc[jj*Mdata + ii];
        }
      }

      /* add diagonal block of P0 */
      for (kk=0; kk<N; kk++) {
        cids[kk] = N*cgid + kk; /* global col IDs in P0 */
      }
      MatSetValues(a_Prol,M,fids,N,cids,qqr,INSERT_VALUES);
      PetscFree5(qqc,qqr,TAU,WORK,fids);
      clid++;
    } /* coarse agg */
  } /* for all fine nodes */
  MatAssemblyBegin(a_Prol,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(a_Prol,MAT_FINAL_ASSEMBLY);
  PCGAMGHashTableDestroy(&fgid_flid);
  return(0);
}
/* Print the AGG-specific option settings (called from the GAMG PCView) */
static PetscErrorCode PCView_GAMG_AGG(PC pc,PetscViewer viewer)
{
  PC_MG       *mg          = (PC_MG*)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;

  PetscViewerASCIIPrintf(viewer,"      AGG specific options\n");
  PetscViewerASCIIPrintf(viewer,"        Symmetric graph %s\n",pc_gamg_agg->sym_graph ? "true" : "false");
  PetscViewerASCIIPrintf(viewer,"        Number of levels to square graph %D\n",pc_gamg_agg->square_graph);
  PetscViewerASCIIPrintf(viewer,"        Number smoothing steps %D\n",pc_gamg_agg->nsmooths);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCGAMGGraph_AGG

   Builds the (optionally symmetrized) strength-of-connection graph for the
   current level and filters weak entries below the level's threshold.

  Input Parameter:
   . pc - this
   . Amat - matrix on this fine level
  Output Parameter:
   . a_Gmat -
*/
static PetscErrorCode PCGAMGGraph_AGG(PC pc,Mat Amat,Mat *a_Gmat)
{
  PetscErrorCode  ierr;
  PC_MG           *mg          = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg     = (PC_GAMG*)mg->innerctx;
  const PetscReal vfilter      = pc_gamg->threshold[pc_gamg->current_level];
  PC_GAMG_AGG     *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
  Mat             Gmat;
  MPI_Comm        comm;
  PetscBool /* set,flg , */ symm;

  PetscObjectGetComm((PetscObject)Amat,&comm);
  PetscLogEventBegin(PC_GAMGGraph_AGG,0,0,0,0);

  /* MatIsSymmetricKnown(Amat, &set, &flg); || !(set && flg) -- this causes lot of symm calls */
  symm = (PetscBool)(pc_gamg_agg->sym_graph); /* && !pc_gamg_agg->square_graph; */

  PCGAMGCreateGraph(Amat, &Gmat);
  PCGAMGFilterGraph(&Gmat, vfilter, symm); /* drops entries weaker than vfilter */
  *a_Gmat = Gmat;
  PetscLogEventEnd(PC_GAMGGraph_AGG,0,0,0,0);
  return(0);
}
/* -------------------------------------------------------------------------- */
/*
   PCGAMGCoarsen_AGG

   Optionally squares the graph, runs MIS-based coarsening on a randomized
   vertex ordering, then (when the graph was squared) fixes up aggregates on
   the unsquared graph with smoothAggs.

  Input Parameter:
   . a_pc - this
  Input/Output Parameter:
   . a_Gmat1 - graph on this fine level - coarsening can change this (squares it)
  Output Parameter:
   . agg_lists - list of aggregates
*/
static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc,Mat *a_Gmat1,PetscCoarsenData **agg_lists)
{
  PC_MG       *mg          = (PC_MG*)a_pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG*)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
  Mat         mat,Gmat2, Gmat1 = *a_Gmat1; /* squared graph */
  IS          perm;
  PetscInt    Istart,Iend,Ii,nloc,bs,n,m;
  PetscInt    *permute;
  PetscBool   *bIndexSet;
  MatCoarsen  crs;
  MPI_Comm    comm;
  PetscMPIInt rank;
  PetscReal   hashfact;
  PetscInt    iSwapIndex;
  PetscRandom random;

  PetscLogEventBegin(PC_GAMGCoarsen_AGG,0,0,0,0);
  PetscObjectGetComm((PetscObject)Gmat1,&comm);
  MPI_Comm_rank(comm, &rank);
  MatGetLocalSize(Gmat1, &n, &m);
  MatGetBlockSize(Gmat1, &bs);
  if (bs != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bs %D must be 1",bs);
  nloc = n/bs;

  /* square the graph on the first 'square_graph' levels for aggressive coarsening */
  if (pc_gamg->current_level < pc_gamg_agg->square_graph) {
    PetscInfo2(a_pc,"Square Graph on level %D of %D to square\n",pc_gamg->current_level+1,pc_gamg_agg->square_graph);
    MatTransposeMatMult(Gmat1, Gmat1, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &Gmat2);
  } else Gmat2 = Gmat1;

  /* get MIS aggs - randomize the greedy ordering to avoid bias */
  PetscMalloc1(nloc, &permute);
  PetscCalloc1(nloc, &bIndexSet);
  for (Ii = 0; Ii < nloc; Ii++) {
    permute[Ii] = Ii;
  }
  PetscRandomCreate(PETSC_COMM_SELF,&random);
  MatGetOwnershipRange(Gmat1, &Istart, &Iend);
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscRandomGetValueReal(random,&hashfact);
    iSwapIndex = (PetscInt) (hashfact*nloc)%nloc;
    if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) {
      PetscInt iTemp = permute[iSwapIndex];
      permute[iSwapIndex]   = permute[Ii];
      permute[Ii]           = iTemp;
      bIndexSet[iSwapIndex] = PETSC_TRUE;
    }
  }
  PetscFree(bIndexSet);
  PetscRandomDestroy(&random);
  ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm);
#if defined PETSC_GAMG_USE_LOG
  PetscLogEventBegin(petsc_gamg_setup_events[SET4],0,0,0,0);
#endif
  MatCoarsenCreate(comm, &crs);
  MatCoarsenSetFromOptions(crs);
  MatCoarsenSetGreedyOrdering(crs, perm);
  MatCoarsenSetAdjacency(crs, Gmat2);
  MatCoarsenSetStrictAggs(crs, PETSC_TRUE);
  MatCoarsenApply(crs);
  MatCoarsenGetData(crs, agg_lists); /* output */
  MatCoarsenDestroy(&crs);

  ISDestroy(&perm);
  PetscFree(permute);
#if defined PETSC_GAMG_USE_LOG
  PetscLogEventEnd(petsc_gamg_setup_events[SET4],0,0,0,0);
#endif

  /* smooth aggs: reconcile aggregates against the unsquared graph */
  if (Gmat2 != Gmat1) {
    const PetscCoarsenData *llist = *agg_lists;
    smoothAggs(a_pc,Gmat2, Gmat1, *agg_lists);
    MatDestroy(&Gmat1); /* replace the caller's graph with the squared one */
    *a_Gmat1 = Gmat2; /* output */
    PetscCDGetMat(llist, &mat);
    if (mat) SETERRQ(comm,PETSC_ERR_ARG_WRONG, "Auxilary matrix with squared graph????");
  } else {
    const PetscCoarsenData *llist = *agg_lists;
    /* see if we have a matrix that takes precedence (returned from MatCoarsenApply) */
    PetscCDGetMat(llist, &mat);
    if (mat) {
      MatDestroy(&Gmat1);
      *a_Gmat1 = mat; /* output */
    }
  }
  PetscLogEventEnd(PC_GAMGCoarsen_AGG,0,0,0,0);
  return(0);
}
945: /* -------------------------------------------------------------------------- */
946: /*
947: PCGAMGProlongator_AGG
949: Input Parameter:
950: . pc - this
951: . Amat - matrix on this fine level
952: . Graph - matrix used to communicate ghost data for the nodes
953: . agg_lists - list of aggregates
954: Output Parameter:
955: . a_P_out - prolongation operator to the next level
956: */
957: static PetscErrorCode PCGAMGProlongator_AGG(PC pc,Mat Amat,Mat Gmat,PetscCoarsenData *agg_lists,Mat *a_P_out)
958: {
959: PC_MG *mg = (PC_MG*)pc->data;
960: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
961: const PetscInt col_bs = pc_gamg->data_cell_cols;
963: PetscInt Istart,Iend,nloc,ii,jj,kk,my0,nLocalSelected,bs;
964: Mat Prol;
965: PetscMPIInt rank, size;
966: MPI_Comm comm;
967: PetscReal *data_w_ghost;
968: PetscInt myCrs0, nbnodes=0, *flid_fgid;
969: MatType mtype;
972: PetscObjectGetComm((PetscObject)Amat,&comm);
973: if (col_bs < 1) SETERRQ(comm,PETSC_ERR_PLIB,"Column bs cannot be less than 1");
974: PetscLogEventBegin(PC_GAMGProlongator_AGG,0,0,0,0);
975: MPI_Comm_rank(comm, &rank);
976: MPI_Comm_size(comm, &size);
977: MatGetOwnershipRange(Amat, &Istart, &Iend);
978: MatGetBlockSize(Amat, &bs);
      /* nloc = number of local block-nodes, my0 = global index of the first local node */
979: nloc = (Iend-Istart)/bs; my0 = Istart/bs;
980: if ((Iend-Istart) % bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"(Iend %D - Istart %D) not divisible by bs %D",Iend,Istart,bs);
982: /* get 'nLocalSelected': number of local nodes that head a non-empty aggregate */
983: for (ii=0, nLocalSelected = 0; ii < nloc; ii++) {
984: PetscBool ise;
985: /* filter out singletons 0 or 1? */
986: PetscCDEmptyAt(agg_lists, ii, &ise);
987: if (!ise) nLocalSelected++;
988: }
990: /* create prolongator, create P matrix: nloc*bs fine rows, one col_bs-wide column block per selected aggregate */
991: MatGetType(Amat,&mtype);
992: MatCreate(comm, &Prol);
993: MatSetSizes(Prol,nloc*bs,nLocalSelected*col_bs,PETSC_DETERMINE,PETSC_DETERMINE);
994: MatSetBlockSizes(Prol, bs, col_bs);
995: MatSetType(Prol, mtype);
      /* each fine row hits exactly one aggregate, hence col_bs nonzeros per row */
996: MatSeqAIJSetPreallocation(Prol, col_bs, NULL);
997: MatMPIAIJSetPreallocation(Prol,col_bs, NULL,col_bs, NULL);
999: /* can get all points "removed": empty coarse grid means no prolongator; return NULL */
1000: MatGetSize(Prol, &kk, &ii);
1001: if (!ii) {
1002: PetscInfo(pc,"No selected points on coarse grid\n");
1003: MatDestroy(&Prol);
1004: *a_P_out = NULL; /* out */
1005: PetscLogEventEnd(PC_GAMGProlongator_AGG,0,0,0,0);
1006: return(0);
1007: }
1008: PetscInfo1(pc,"New grid %D nodes\n",ii/col_bs);
1009: MatGetOwnershipRangeColumn(Prol, &myCrs0, &kk);
1011: if ((kk-myCrs0) % col_bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"(kk %D -myCrs0 %D) not divisible by col_bs %D",kk,myCrs0,col_bs);
       /* myCrs0 = first local coarse node (block units) */
1012: myCrs0 = myCrs0/col_bs;
1013: if ((kk/col_bs-myCrs0) != nLocalSelected) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"(kk %D/col_bs %D - myCrs0 %D) != nLocalSelected %D)",kk,col_bs,myCrs0,nLocalSelected);
1015: /* create global vector of data in 'data_w_ghost': pc_gamg->data (the col_bs near-null-space vectors) extended with ghost-node values */
1016: #if defined PETSC_GAMG_USE_LOG
1017: PetscLogEventBegin(petsc_gamg_setup_events[SET7],0,0,0,0);
1018: #endif
1019: if (size > 1) { /* communicate one dof of one data vector at a time and reassemble with ghosts */
1020: PetscReal *tmp_gdata,*tmp_ldata,*tp2;
1021: PetscMalloc1(nloc, &tmp_ldata);
1022: for (jj = 0; jj < col_bs; jj++) {
1023: for (kk = 0; kk < bs; kk++) {
1024: PetscInt ii,stride;
        /* strided gather: dof kk of data vector jj, one value per local node */
1025: const PetscReal *tp = pc_gamg->data + jj*bs*nloc + kk;
1026: for (ii = 0; ii < nloc; ii++, tp += bs) tmp_ldata[ii] = *tp;
1028: PCGAMGGetDataWithGhosts(Gmat, 1, tmp_ldata, &stride, &tmp_gdata);
1030: if (!jj && !kk) { /* now I know how many total nodes - allocate */
1031: PetscMalloc1(stride*bs*col_bs, &data_w_ghost);
1032: nbnodes = bs*stride;
1033: }
        /* scatter back into the ghosted array with the same jj/kk layout, nloc replaced by stride */
1034: tp2 = data_w_ghost + jj*bs*stride + kk;
1035: for (ii = 0; ii < stride; ii++, tp2 += bs) *tp2 = tmp_gdata[ii];
1036: PetscFree(tmp_gdata);
1037: }
1038: }
1039: PetscFree(tmp_ldata);
1040: } else {
       /* serial: no ghosts, so alias pc_gamg->data directly (must NOT be freed separately, see below) */
1041: nbnodes = bs*nloc;
1042: data_w_ghost = (PetscReal*)pc_gamg->data;
1043: }
1045: /* get P0: build flid_fgid, mapping local (ghosted) node index -> global fine-grid node id */
1046: if (size > 1) {
1047: PetscReal *fid_glid_loc,*fiddata;
1048: PetscInt stride;
1050: PetscMalloc1(nloc, &fid_glid_loc);
       /* ship the global ids through the PetscReal-based ghost exchange, then cast back */
1051: for (kk=0; kk<nloc; kk++) fid_glid_loc[kk] = (PetscReal)(my0+kk);
1052: PCGAMGGetDataWithGhosts(Gmat, 1, fid_glid_loc, &stride, &fiddata);
1053: PetscMalloc1(stride, &flid_fgid);
1054: for (kk=0; kk<stride; kk++) flid_fgid[kk] = (PetscInt)fiddata[kk];
1055: PetscFree(fiddata);
1057: if (stride != nbnodes/bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"stride %D != nbnodes %D/bs %D",stride,nbnodes,bs);
1058: PetscFree(fid_glid_loc);
1059: } else {
1060: PetscMalloc1(nloc, &flid_fgid);
1061: for (kk=0; kk<nloc; kk++) flid_fgid[kk] = my0 + kk;
1062: }
1063: #if defined PETSC_GAMG_USE_LOG
1064: PetscLogEventEnd(petsc_gamg_setup_events[SET7],0,0,0,0);
1065: PetscLogEventBegin(petsc_gamg_setup_events[SET8],0,0,0,0);
1066: #endif
1067: {
      /* fill Prol (tentative prolongator) and replace pc_gamg->data with the coarse-level data it produces */
1068: PetscReal *data_out = NULL;
1069: formProl0(agg_lists, bs, col_bs, myCrs0, nbnodes,data_w_ghost, flid_fgid, &data_out, Prol);
1070: PetscFree(pc_gamg->data);
1072: pc_gamg->data = data_out;
1073: pc_gamg->data_cell_rows = col_bs;
1074: pc_gamg->data_sz = col_bs*col_bs*nLocalSelected;
1075: }
1076: #if defined PETSC_GAMG_USE_LOG
1077: PetscLogEventEnd(petsc_gamg_setup_events[SET8],0,0,0,0);
1078: #endif
      /* in serial data_w_ghost aliases the old pc_gamg->data (already freed above), so free only in parallel */
1079: if (size > 1) {PetscFree(data_w_ghost);}
1080: PetscFree(flid_fgid);
1082: *a_P_out = Prol; /* out */
1084: PetscLogEventEnd(PC_GAMGProlongator_AGG,0,0,0,0);
1085: return(0);
1086: }
1088: /* -------------------------------------------------------------------------- */
1089: /*
1090: PCGAMGOptProlongator_AGG
1092: Input Parameter:
1093: . pc - this
1094: . Amat - matrix on this fine level
1095: In/Output Parameter:
1096: . a_P - prolongation operator to the next level
1097: */
1098: static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc,Mat Amat,Mat *a_P)
1099: {
1101: PC_MG *mg = (PC_MG*)pc->data;
1102: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1103: PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG*)pc_gamg->subctx;
1104: PetscInt jj;
1105: Mat Prol = *a_P;
1106: MPI_Comm comm;
1107: KSP eksp;
1108: Vec bb, xx;
1109: PC epc;
1110: PetscReal alpha, emax, emin;
1111: PetscRandom random;
1114: PetscObjectGetComm((PetscObject)Amat,&comm);
1115: PetscLogEventBegin(PC_GAMGOptProlongator_AGG,0,0,0,0);
1117: /* compute maximum value of operator to be used in smoother: a few KSP iterations
        on a random RHS estimate the extreme singular values of the Jacobi-preconditioned Amat */
1118: if (0 < pc_gamg_agg->nsmooths) {
1119: MatCreateVecs(Amat, &bb, 0);
1120: MatCreateVecs(Amat, &xx, 0);
       /* random RHS so the Krylov space excites a broad part of the spectrum */
1121: PetscRandomCreate(PETSC_COMM_SELF,&random);
1122: VecSetRandom(bb,random);
1123: PetscRandomDestroy(&random);
1125: KSPCreate(comm,&eksp);
1126: KSPSetErrorIfNotConverged(eksp,pc->erroriffailure);
       /* cap at 10 iterations: only an eigenvalue estimate is wanted, not a solve */
1127: KSPSetTolerances(eksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,10);
       /* no norms / no convergence tests needed, keeps the estimation solve cheap */
1128: KSPSetNormType(eksp, KSP_NORM_NONE);
1130: KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);
1131: KSPSetOperators(eksp, Amat, Amat);
1132: KSPSetComputeSingularValues(eksp,PETSC_TRUE);
1134: KSPGetPC(eksp, &epc);
1135: PCSetType(epc, PCJACOBI); /* smoother in smoothed agg. */
       /* overridable from the command line via the "gamg_est_" prefix */
1137: KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
1138: KSPAppendOptionsPrefix(eksp, "gamg_est_");
1139: KSPSetFromOptions(eksp);
1141: /* solve - keep stuff out of logging */
1142: PetscLogEventDeactivate(KSP_Solve);
1143: PetscLogEventDeactivate(PC_Apply);
1144: KSPSolve(eksp, bb, xx);
1145: KSPCheckSolve(eksp,pc,xx);
1146: PetscLogEventActivate(KSP_Solve);
1147: PetscLogEventActivate(PC_Apply);
1149: KSPComputeExtremeSingularValues(eksp, &emax, &emin);
1150: PetscInfo3(pc,"Smooth P0: max eigen=%e min=%e PC=%s\n",emax,emin,PCJACOBI);
1151: VecDestroy(&xx);
1152: VecDestroy(&bb);
1153: KSPDestroy(&eksp);
1154: }
1156: /* smooth P0: note emax is only set in the branch above, but this loop runs
        only when nsmooths > 0, i.e. exactly when that branch executed */
1157: for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) {
1158: Mat tMat;
1159: Vec diag;
1161: #if defined PETSC_GAMG_USE_LOG
1162: PetscLogEventBegin(petsc_gamg_setup_events[SET9],0,0,0,0);
1163: #endif
1165: /* smooth P1 := (I - omega/lam D^{-1}A)P0 */
1166: MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat);
1167: MatCreateVecs(Amat, &diag, 0);
1168: MatGetDiagonal(Amat, diag); /* effectively PCJACOBI */
1169: VecReciprocal(diag);
       /* left-scale rows only (NULL right vector): tMat = D^{-1} A P0 */
1170: MatDiagonalScale(tMat, diag, 0);
1171: VecDestroy(&diag);
       /* damping omega = 1.4 over the estimated largest eigenvalue
          NOTE(review): classical smoothed aggregation uses omega = 4/3 - confirm 1.4 is intentional */
1172: alpha = -1.4/emax;
1173: MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN);
1174: MatDestroy(&Prol);
1175: Prol = tMat;
1176: #if defined PETSC_GAMG_USE_LOG
1177: PetscLogEventEnd(petsc_gamg_setup_events[SET9],0,0,0,0);
1178: #endif
1179: }
1180: PetscLogEventEnd(PC_GAMGOptProlongator_AGG,0,0,0,0);
1181: *a_P = Prol;
1182: return(0);
1183: }
1185: /* -------------------------------------------------------------------------- */
1186: /*
1187: PCCreateGAMG_AGG
1189: Input Parameter:
1190: . pc -
1191: */
1192: PetscErrorCode PCCreateGAMG_AGG(PC pc)
1193: {
1195: PC_MG *mg = (PC_MG*)pc->data;
1196: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1197: PC_GAMG_AGG *pc_gamg_agg;
1200: /* create sub context for SA (smoothed aggregation) and hang it off the GAMG context */
1201: PetscNewLog(pc,&pc_gamg_agg);
1202: pc_gamg->subctx = pc_gamg_agg;
1204: pc_gamg->ops->setfromoptions = PCSetFromOptions_GAMG_AGG;
1205: pc_gamg->ops->destroy = PCDestroy_GAMG_AGG;
1206: /* reset does not do anything; setup not virtual */
1208: /* set internal function pointers: the aggregation implementations of the GAMG stages */
1209: pc_gamg->ops->graph = PCGAMGGraph_AGG;
1210: pc_gamg->ops->coarsen = PCGAMGCoarsen_AGG;
1211: pc_gamg->ops->prolongator = PCGAMGProlongator_AGG;
1212: pc_gamg->ops->optprolongator = PCGAMGOptProlongator_AGG;
1213: pc_gamg->ops->createdefaultdata = PCSetData_AGG;
1214: pc_gamg->ops->view = PCView_GAMG_AGG;
      /* defaults: square the graph on the first level only, unsymmetrized graph, one smoothing step */
1216: pc_gamg_agg->square_graph = 1;
1217: pc_gamg_agg->sym_graph = PETSC_FALSE;
1218: pc_gamg_agg->nsmooths = 1;
      /* compose the AGG-specific setters so PCGAMGSet* dispatch via PetscTryMethod finds them */
1220: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNSmooths_C",PCGAMGSetNSmooths_AGG);
1221: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetSymGraph_C",PCGAMGSetSymGraph_AGG);
1222: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetSquareGraph_C",PCGAMGSetSquareGraph_AGG);
1223: PetscObjectComposeFunction((PetscObject)pc,"PCSetCoordinates_C",PCSetCoordinates_AGG);
1224: return(0);
1225: }