Actual source code: gamg.c
petsc-3.5.4 2015-05-23
1: /*
2: GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3: */
4: #include <petsc-private/matimpl.h>
5: #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
6: #include <petsc-private/kspimpl.h>
7: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */
9: #if defined PETSC_GAMG_USE_LOG
10: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
11: #endif
13: #if defined PETSC_USE_LOG
14: PetscLogEvent PC_GAMGGgraph_AGG;
15: PetscLogEvent PC_GAMGGgraph_GEO;
16: PetscLogEvent PC_GAMGCoarsen_AGG;
17: PetscLogEvent PC_GAMGCoarsen_GEO;
18: PetscLogEvent PC_GAMGProlongator_AGG;
19: PetscLogEvent PC_GAMGProlongator_GEO;
20: PetscLogEvent PC_GAMGOptprol_AGG;
21: #endif
23: #define GAMG_MAXLEVELS 30
25: /* #define GAMG_STAGES */
26: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
27: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
28: #endif
30: static PetscFunctionList GAMGList = 0;
31: static PetscBool PCGAMGPackageInitialized;
33: /* ----------------------------------------------------------------------------- */
36: PetscErrorCode PCReset_GAMG(PC pc)
37: {
39: PC_MG *mg = (PC_MG*)pc->data;
40: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
43: if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
44: PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
45: PetscFree(pc_gamg->data);
46: }
47: pc_gamg->data = NULL; pc_gamg->data_sz = 0;
49: if (pc_gamg->orig_data) {
50: PetscFree(pc_gamg->orig_data);
51: }
52: return(0);
53: }
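/*
   Example (illustrative only): the typical way PCGAMG is driven through KSP.
   'solve_with_gamg' is a hypothetical caller; 'A', 'b', 'x' and 'comm' are assumed
   to exist in the application. PCSetType(pc,PCGAMG) enters PCCreate_GAMG() below,
   and destruction runs PCDestroy_GAMG() -> PCReset_GAMG() above.
*/
#if 0
#include <petscksp.h>
PetscErrorCode solve_with_gamg(MPI_Comm comm, Mat A, Vec b, Vec x)
{
  KSP            ksp;
  PC             pc;
  PetscErrorCode ierr;

  ierr = KSPCreate(comm, &ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp, A, A);CHKERRQ(ierr); /* A is both operator and preconditioning matrix */
  ierr = KSPGetPC(ksp, &pc);CHKERRQ(ierr);
  ierr = PCSetType(pc, PCGAMG);CHKERRQ(ierr);      /* enters PCCreate_GAMG() below */
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);     /* picks up the -pc_gamg_* options */
  ierr = KSPSolve(ksp, b, x);CHKERRQ(ierr);        /* PCSetUp_GAMG() runs on the first apply */
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  return 0;
}
#endif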
55: /* -------------------------------------------------------------------------- */
56: /*
57: createLevel: create the coarse operator with a Galerkin product (RAP), then repartition
58: and/or reduce the number of active processors.
60: Input Parameters:
61: . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
62: 'pc_gamg->data_sz' are changed via repartitioning/reduction.
63: . Amat_fine - matrix on this fine (k) level
64: . cr_bs - coarse block size
65: . isLast - whether this is the coarsest (last) level, which is reduced to one process
66: Input/Output Parameters:
67: . a_P_inout - prolongation operator to the next level (k-->k-1)
68: . a_nactive_proc - number of active procs
69: Output Parameter:
70: . a_Amat_crs - coarse matrix that is created (k-1)
71: */
75: static PetscErrorCode createLevel(const PC pc,const Mat Amat_fine,const PetscInt cr_bs,const PetscBool isLast,
76: Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
77: {
78: PetscErrorCode ierr;
79: PC_MG *mg = (PC_MG*)pc->data;
80: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
81: const PetscBool repart = pc_gamg->repart;
82: const PetscInt min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
83: Mat Cmat,Pold=*a_P_inout;
84: MPI_Comm comm;
85: PetscMPIInt rank,size,new_size,nactive=*a_nactive_proc;
86: PetscInt ncrs_eq,ncrs,f_bs;
89: PetscObjectGetComm((PetscObject)Amat_fine,&comm);
90: MPI_Comm_rank(comm, &rank);
91: MPI_Comm_size(comm, &size);
92: MatGetBlockSize(Amat_fine, &f_bs);
93: /* RAP */
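  /*
     Galerkin coarse-grid operator: Cmat = Pold^T * Amat_fine * Pold ("RAP" with
     R = P^T). The 2.0 argument is a fill-ratio hint used to preallocate the product.
  */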
94: MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);
96: /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
97: MatGetLocalSize(Cmat, &ncrs_eq, NULL);
98: if (pc_gamg->data_cell_rows>0) {
99: ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
100: }
101: else {
102: PetscInt bs;
103: MatGetBlockSize(Cmat, &bs);
104: ncrs = ncrs_eq/bs;
105: }
107: /* get the number of PEs to make active ('new_size'); the reduction can be to any integer in 1..P */
108: {
109: PetscInt ncrs_eq_glob;
110: MatGetSize(Cmat, &ncrs_eq_glob, NULL);
111: new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
112: if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1;
113: else if (new_size >= nactive) new_size = nactive; /* no change, rare */
114: if (isLast) new_size = 1;
115: }
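  /*
     A worked example of the reduction logic above (illustrative numbers): with
     min_eq_proc = 50, coarse_eq_limit = 800 and nactive = 64 active processes,
     ncrs_eq_glob = 10000 gives new_size = 200, clipped back to nactive = 64 (no change);
     ncrs_eq_glob = 1000 gives new_size = 20 (reduce 64 -> 20 processes);
     ncrs_eq_glob = 500 < coarse_eq_limit forces new_size = 1.
  */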
117: if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
118: else {
119: PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
120: IS is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;
122: nloc_old = ncrs_eq/cr_bs;
123: if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
124: #if defined PETSC_GAMG_USE_LOG
125: PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
126: #endif
127: /* make 'is_eq_newproc' */
128: PetscMalloc1(size, &counts);
129: if (repart) {
130: /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
131: Mat adj;
133: if (pc_gamg->verbose>0) {
134: if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
135: else {
136: PetscInt n;
137: MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);
138: PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
139: }
140: }
142: /* get 'adj' */
143: if (cr_bs == 1) {
144: MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
145: } else {
146: /* make a scalar matrix to partition (no Stokes here) */
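        /*
           The code below collapses each cr_bs x cr_bs block to a single scalar entry:
           dof row ii and dof column idx[jj] map to node indices ii/cr_bs and idx[jj]/cr_bs.
           E.g. with cr_bs = 3, dof rows {0,1,2} all land in node row 0, and a coupling
           between dofs 4 and 7 becomes a contribution to node entry (1,2).
        */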
147: Mat tMat;
148: PetscInt Istart_crs,Iend_crs,ncols,jj,Ii;
149: const PetscScalar *vals;
150: const PetscInt *idx;
151: PetscInt *d_nnz, *o_nnz, M, N;
152: static PetscInt llev = 0;
154: PetscMalloc1(ncrs, &d_nnz);
155: PetscMalloc1(ncrs, &o_nnz);
156: MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
157: MatGetSize(Cmat, &M, &N);
158: for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
159: MatGetRow(Cmat,Ii,&ncols,0,0);
160: d_nnz[jj] = ncols/cr_bs;
161: o_nnz[jj] = ncols/cr_bs;
162: MatRestoreRow(Cmat,Ii,&ncols,0,0);
163: if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
164: if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
165: }
167: MatCreate(comm, &tMat);
168: MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);
169: MatSetType(tMat,MATAIJ);
170: MatSeqAIJSetPreallocation(tMat,0,d_nnz);
171: MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
172: PetscFree(d_nnz);
173: PetscFree(o_nnz);
175: for (ii = Istart_crs; ii < Iend_crs; ii++) {
176: PetscInt dest_row = ii/cr_bs;
177: MatGetRow(Cmat,ii,&ncols,&idx,&vals);
178: for (jj = 0; jj < ncols; jj++) {
179: PetscInt dest_col = idx[jj]/cr_bs;
180: PetscScalar v = 1.0;
181: MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
182: }
183: MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
184: }
185: MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
186: MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);
188: if (llev++ == -1) {
189: PetscViewer viewer; char fname[32];
190: PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
191: PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
192: MatView(tMat, viewer);
193: PetscViewerDestroy(&viewer);
194: }
196: MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
198: MatDestroy(&tMat);
199: } /* create 'adj' */
201: { /* partition: get newproc_idx */
202: char prefix[256];
203: const char *pcpre;
204: const PetscInt *is_idx;
205: MatPartitioning mpart;
206: IS proc_is;
207: PetscInt targetPE;
209: MatPartitioningCreate(comm, &mpart);
210: MatPartitioningSetAdjacency(mpart, adj);
211: PCGetOptionsPrefix(pc, &pcpre);
212: PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
213: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
214: MatPartitioningSetFromOptions(mpart);
215: MatPartitioningSetNParts(mpart, new_size);
216: MatPartitioningApply(mpart, &proc_is);
217: MatPartitioningDestroy(&mpart);
219: /* collect IS info */
220: PetscMalloc1(ncrs_eq, &newproc_idx);
221: ISGetIndices(proc_is, &is_idx);
222: targetPE = 1; /* bring to "front" of machine */
223: /*targetPE = size/new_size;*/ /* spread partitioning across machine */
224: for (kk = jj = 0 ; kk < nloc_old ; kk++) {
225: for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
226: newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
227: }
228: }
229: ISRestoreIndices(proc_is, &is_idx);
230: ISDestroy(&proc_is);
231: }
232: MatDestroy(&adj);
234: ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
235: if (newproc_idx != 0) {
236: PetscFree(newproc_idx);
237: }
238: } else { /* simple aggregation of parts -- 'is_eq_newproc' */
240: PetscInt rfactor,targetPE;
241: /* find factor */
242: if (new_size == 1) rfactor = size; /* easy */
243: else {
244: PetscReal best_fact = 0.;
245: jj = -1;
246: for (kk = 1 ; kk <= size ; kk++) {
247: if (size%kk==0) { /* a candidate */
248: PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
249: if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
250: if (fact > best_fact) {
251: best_fact = fact; jj = kk;
252: }
253: }
254: }
255: if (jj != -1) rfactor = jj;
256: else rfactor = 1; /* defensive fallback; kk=1 always divides size, so jj should have been set */
257: }
258: new_size = size/rfactor;
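      /*
         Example of the factor search above: with size = 12 and a requested new_size = 5,
         the divisors kk of 12 give nactpe = 12, 6, 4, 3, 2, 1; the ratio to 5 (folded
         below 1) is best for nactpe = 6 (fact = 5/6), so rfactor = 2 and the actual
         new_size becomes 12/2 = 6.
      */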
260: if (new_size==nactive) {
261: *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
262: PetscFree(counts);
263: if (pc_gamg->verbose>0) {
264: PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
265: }
266: return(0);
267: }
269: if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
270: targetPE = rank/rfactor;
271: ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);
272: } /* end simple 'is_eq_newproc' */
274: /*
275: Create an index set from the is_eq_newproc index set giving the new global numbering of each equation (the mapping TO the new layout)
276: */
277: ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
278: is_eq_num_prim = is_eq_num;
279: /*
280: Determine how many equations/vertices are assigned to each processor
281: */
282: ISPartitioningCount(is_eq_newproc, size, counts);
283: ncrs_eq_new = counts[rank];
284: ISDestroy(&is_eq_newproc);
285: ncrs_new = ncrs_eq_new/cr_bs; /* nodes */
287: PetscFree(counts);
288: #if defined PETSC_GAMG_USE_LOG
289: PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
290: #endif
291: /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
292: {
293: Vec src_crd, dest_crd;
294: const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
295: VecScatter vecscat;
296: PetscScalar *array;
297: IS isscat;
299: /* move data (for primal equations only) */
300: /* Create a vector to contain the newly ordered element information */
301: VecCreate(comm, &dest_crd);
302: VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);
303: VecSetType(dest_crd,VECSTANDARD); /* this is needed! */
304: /*
305: There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
306: a block size of 'node_data_sz'). Note, ISs are expanded into equation space by 'cr_bs'.
307: */
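      /*
         Concrete layout example (illustrative): for coordinate data with ndata_rows = 1
         and ndata_cols = 3 (x, y, z), pc_gamg->data is blocked by column,
         [x_0..x_{ncrs-1} | y_0..y_{ncrs-1} | z_0..z_{ncrs-1}], while the work vectors
         below interleave per node, [x_0,y_0,z_0, x_1,y_1,z_1, ...]; the ix/jx index
         formulas convert between the two orderings.
      */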
308: PetscMalloc1((ncrs*node_data_sz), &tidx);
309: ISGetIndices(is_eq_num_prim, &idx);
310: for (ii=0,jj=0; ii<ncrs; ii++) {
311: PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
312: for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
313: }
314: ISRestoreIndices(is_eq_num_prim, &idx);
315: ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);
316: PetscFree(tidx);
317: /*
318: Create a vector to contain the original vertex information for each element
319: */
320: VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);
321: for (jj=0; jj<ndata_cols; jj++) {
322: const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
323: for (ii=0; ii<ncrs; ii++) {
324: for (kk=0; kk<ndata_rows; kk++) {
325: PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
326: PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
327: VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
328: }
329: }
330: }
331: VecAssemblyBegin(src_crd);
332: VecAssemblyEnd(src_crd);
333: /*
334: Scatter the element vertex information (still in the original vertex ordering)
335: to the correct processor
336: */
337: VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
338: ISDestroy(&isscat);
339: VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
340: VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
341: VecScatterDestroy(&vecscat);
342: VecDestroy(&src_crd);
343: /*
344: Put the element vertex data into a new allocation of pc_gamg->data
345: */
346: PetscFree(pc_gamg->data);
347: PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);
349: pc_gamg->data_sz = node_data_sz*ncrs_new;
350: strideNew = ncrs_new*ndata_rows;
352: VecGetArray(dest_crd, &array);
353: for (jj=0; jj<ndata_cols; jj++) {
354: for (ii=0; ii<ncrs_new; ii++) {
355: for (kk=0; kk<ndata_rows; kk++) {
356: PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
357: pc_gamg->data[ix] = PetscRealPart(array[jx]);
358: }
359: }
360: }
361: VecRestoreArray(dest_crd, &array);
362: VecDestroy(&dest_crd);
363: }
364: /* move A and P (columns) with new layout */
365: #if defined PETSC_GAMG_USE_LOG
366: PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
367: #endif
369: /*
370: Invert for MatGetSubMatrix
371: */
372: ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
373: ISSort(new_eq_indices); /* is this needed? */
374: ISSetBlockSize(new_eq_indices, cr_bs);
375: if (is_eq_num != is_eq_num_prim) {
376: ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
377: }
378: ISDestroy(&is_eq_num);
379: #if defined PETSC_GAMG_USE_LOG
380: PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
381: PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
382: #endif
383: /* 'a_Amat_crs' output */
384: {
385: Mat mat;
386: MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
387: *a_Amat_crs = mat;
389: if (!PETSC_TRUE) {
390: PetscInt cbs, rbs;
391: MatGetBlockSizes(Cmat, &rbs, &cbs);
392: PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
393: MatGetBlockSizes(mat, &rbs, &cbs);
394: PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);
395: }
396: }
397: MatDestroy(&Cmat);
399: #if defined PETSC_GAMG_USE_LOG
400: PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
401: #endif
402: /* prolongator */
403: {
404: IS findices;
405: PetscInt Istart,Iend;
406: Mat Pnew;
407: MatGetOwnershipRange(Pold, &Istart, &Iend);
408: #if defined PETSC_GAMG_USE_LOG
409: PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
410: #endif
411: ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
412: ISSetBlockSize(findices,f_bs);
413: MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
414: ISDestroy(&findices);
416: if (!PETSC_TRUE) {
417: PetscInt cbs, rbs;
418: MatGetBlockSizes(Pold, &rbs, &cbs);
419: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
420: MatGetBlockSizes(Pnew, &rbs, &cbs);
421: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
422: }
423: #if defined PETSC_GAMG_USE_LOG
424: PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
425: #endif
426: MatDestroy(a_P_inout);
428: /* output - repartitioned */
429: *a_P_inout = Pnew;
430: }
431: ISDestroy(&new_eq_indices);
433: *a_nactive_proc = new_size; /* output */
434: }
436: /* output matrix data */
437: if (!PETSC_TRUE) {
438: PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
439: if (llev==0) {
440: sprintf(fname,"Cmat_%d.m",llev++);
441: PetscViewerASCIIOpen(comm,fname,&viewer);
442: PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
443: MatView(Amat_fine, viewer);
444: PetscViewerDestroy(&viewer);
445: }
446: sprintf(fname,"Cmat_%d.m",llev++);
447: PetscViewerASCIIOpen(comm,fname,&viewer);
448: PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
449: MatView(Cmat, viewer);
450: PetscViewerDestroy(&viewer);
451: }
452: return(0);
453: }
455: /* -------------------------------------------------------------------------- */
456: /*
457: PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
458: by setting data structures and options.
460: Input Parameter:
461: . pc - the preconditioner context
463: Application Interface Routine: PCSetUp()
465: Notes:
466: The interface routine PCSetUp() is not usually called directly by
467: the user, but instead is called by PCApply() if necessary.
468: */
471: PetscErrorCode PCSetUp_GAMG(PC pc)
472: {
474: PC_MG *mg = (PC_MG*)pc->data;
475: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
476: Mat Pmat = pc->pmat;
477: PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
478: MPI_Comm comm;
479: PetscMPIInt rank,size,nactivepe;
480: Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
481: PetscReal emaxs[GAMG_MAXLEVELS];
482: IS *ASMLocalIDsArr[GAMG_MAXLEVELS];
483: PetscLogDouble nnz0=0.,nnztot=0.;
484: MatInfo info;
485: PetscBool redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol);
488: PetscObjectGetComm((PetscObject)pc,&comm);
489: MPI_Comm_rank(comm,&rank);
490: MPI_Comm_size(comm,&size);
492: if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);
494: if (pc_gamg->setup_count++ > 0) {
495: if (redo_mesh_setup) {
496: /* reset everything */
497: PCReset_MG(pc);
498: pc->setupcalled = 0;
499: } else {
500: PC_MG_Levels **mglevels = mg->levels;
501: /* just do Galerkin grids */
502: Mat B,dA,dB;
504: if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
505: if (pc_gamg->Nlevels > 1) {
506: /* currently only handle case where mat and pmat are the same on coarser levels */
507: KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);
508: /* (re)set to get dirty flag */
509: KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);
511: for (level=pc_gamg->Nlevels-2; level>=0; level--) {
512: /* the first time through, the matrix structure has changed from repartitioning */
513: if (pc_gamg->setup_count==2) {
514: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
515: MatDestroy(&mglevels[level]->A);
517: mglevels[level]->A = B;
518: } else {
519: KSPGetOperators(mglevels[level]->smoothd,NULL,&B);
520: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
521: }
522: KSPSetOperators(mglevels[level]->smoothd,B,B);
523: dB = B;
524: }
525: }
527: PCSetUp_MG(pc);
529: /* PCSetUp_MG seems to insist on setting this to GMRES */
530: KSPSetType(mglevels[0]->smoothd, KSPPREONLY);
531: return(0);
532: }
533: }
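  /*
     A sketch of the re-setup path above (illustrative; 'time_loop' and
     'UpdateMatrixValues' are hypothetical application code): with
     -pc_gamg_reuse_interpolation true, a loop that changes matrix values but not
     the nonzero pattern only pays for the Galerkin products on re-setup, not a
     full re-coarsening.
  */
#if 0
PetscErrorCode time_loop(KSP ksp, Mat A, Vec b, Vec x, PetscInt nsteps)
{
  PetscErrorCode ierr;
  PetscInt       step;
  for (step = 0; step < nsteps; step++) {
    ierr = UpdateMatrixValues(A, step);CHKERRQ(ierr); /* hypothetical assembly: same pattern, new values */
    ierr = KSPSetOperators(ksp, A, A);CHKERRQ(ierr);  /* marks the PC as needing setup */
    ierr = KSPSolve(ksp, b, x);CHKERRQ(ierr);         /* with reuse, only the MatPtAP products above are redone */
  }
  return 0;
}
#endif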
535: if (!pc_gamg->data) {
536: if (pc_gamg->orig_data) {
537: MatGetBlockSize(Pmat, &bs);
538: MatGetLocalSize(Pmat, &qq, NULL);
540: pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
541: pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
542: pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;
544: PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);
545: for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
546: } else {
547: if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
548: pc_gamg->ops->createdefaultdata(pc,Pmat);
549: }
550: }
552: /* cache original data for reuse */
553: if (!pc_gamg->orig_data && redo_mesh_setup) {
554: PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);
555: for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
556: pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
557: pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
558: }
560: /* get basic dims */
561: MatGetBlockSize(Pmat, &bs);
563: MatGetSize(Pmat, &M, &qq);
564: if (pc_gamg->verbose) {
565: PetscInt NN = M;
566: if (pc_gamg->verbose==1) {
567: MatGetInfo(Pmat,MAT_LOCAL,&info);
568: MatGetLocalSize(Pmat, &NN, &qq);
569: } else {
570: MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
571: }
572: nnz0 = info.nz_used;
573: nnztot = info.nz_used;
574: PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
575: rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
576: (int)(nnz0/(PetscReal)NN),size);
577: }
579: /* Get A_i and R_i */
580: for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
581: level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (size==1 || nactivepe>1); */
582: level++) {
583: level1 = level + 1;
584: #if defined PETSC_GAMG_USE_LOG
585: PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
586: #if (defined GAMG_STAGES)
587: PetscLogStagePush(gamg_stages[level]);
588: #endif
589: #endif
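    /*
       Each pass of this loop defers to the subtype (AGG, GEO, or Classical) through
       the ops table: graph() builds the strength-of-connection graph, coarsen()
       selects aggregates, prolongator() builds P, and the optional optprol() smooths
       it; createLevel() then forms the Galerkin coarse operator and repartitions.
    */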
590: { /* construct prolongator */
591: Mat Gmat;
592: PetscCoarsenData *agg_lists;
593: Mat Prol11;
595: pc_gamg->ops->graph(pc,Aarr[level], &Gmat);
596: pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
597: pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);
599: /* could have failed to create new level */
600: if (Prol11) {
601: /* get new block size of coarse matrices */
602: MatGetBlockSizes(Prol11, NULL, &bs);
604: if (pc_gamg->ops->optprol) {
605: /* smooth */
606: pc_gamg->ops->optprol(pc, Aarr[level], &Prol11);
607: }
609: Parr[level1] = Prol11;
610: } else Parr[level1] = NULL;
612: if (pc_gamg->use_aggs_in_gasm) {
613: PetscInt bs;
614: MatGetBlockSizes(Prol11, &bs, NULL);
615: PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
616: }
618: MatDestroy(&Gmat);
619: PetscCDDestroy(agg_lists);
620: } /* construct prolongator scope */
621: #if defined PETSC_GAMG_USE_LOG
622: PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
623: #endif
624: /* cache eigen estimate */
625: if (pc_gamg->emax_id != -1) {
626: PetscBool flag;
627: PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);
628: if (!flag) emaxs[level] = -1.;
629: } else emaxs[level] = -1.;
630: if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
631: if (!Parr[level1]) {
632: if (pc_gamg->verbose) {
633: PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);
634: }
635: break;
636: }
637: #if defined PETSC_GAMG_USE_LOG
638: PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
639: #endif
641: createLevel(pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
642: &Parr[level1], &Aarr[level1], &nactivepe);
644: #if defined PETSC_GAMG_USE_LOG
645: PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
646: #endif
647: MatGetSize(Aarr[level1], &M, &qq);
649: if (pc_gamg->verbose > 0) {
650: PetscInt NN = M;
651: if (pc_gamg->verbose==1) {
652: MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
653: MatGetLocalSize(Aarr[level1], &NN, &qq);
654: } else {
655: MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
656: }
658: nnztot += info.nz_used;
659: PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
660: rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
661: (int)(info.nz_used/(PetscReal)NN), nactivepe);
662: }
664: /* stop if one node -- could pull back for singular problems */
665: if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M < 2)) {
666: level++;
667: break;
668: }
669: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
670: PetscLogStagePop();
671: #endif
672: } /* levels */
674: if (pc_gamg->data) {
675: PetscFree(pc_gamg->data);
676: pc_gamg->data = NULL;
677: }
679: if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
680: pc_gamg->Nlevels = level + 1;
681: fine_level = level;
682: PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);
684: /* simple setup */
685: if (!PETSC_TRUE) {
686: PC_MG_Levels **mglevels = mg->levels;
687: for (lidx=0,level=pc_gamg->Nlevels-1;
688: lidx<fine_level;
689: lidx++, level--) {
690: PCMGSetInterpolation(pc, lidx+1, Parr[level]);
691: KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);
692: MatDestroy(&Parr[level]);
693: MatDestroy(&Aarr[level]);
694: }
695: KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);
697: PCSetUp_MG(pc);
698: } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
699: /* set default smoothers & set operators */
700: for (lidx = 1, level = pc_gamg->Nlevels-2;
701: lidx <= fine_level;
702: lidx++, level--) {
703: KSP smoother;
704: PC subpc;
706: PCMGGetSmoother(pc, lidx, &smoother);
707: KSPGetPC(smoother, &subpc);
709: KSPSetNormType(smoother, KSP_NORM_NONE);
710: /* set ops */
711: KSPSetOperators(smoother, Aarr[level], Aarr[level]);
712: PCMGSetInterpolation(pc, lidx, Parr[level+1]);
714: /* set defaults */
715: KSPSetType(smoother, KSPCHEBYSHEV);
717: /* set blocks for GASM smoother that uses the 'aggregates' */
718: if (pc_gamg->use_aggs_in_gasm) {
719: PetscInt sz;
720: IS *is;
722: sz = nASMBlocksArr[level];
723: is = ASMLocalIDsArr[level];
724: PCSetType(subpc, PCGASM);
725: PCGASMSetOverlap(subpc, 0);
726: if (sz==0) {
727: IS is;
728: PetscInt my0,kk;
729: MatGetOwnershipRange(Aarr[level], &my0, &kk);
730: ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);
731: PCGASMSetSubdomains(subpc, 1, &is, NULL);
732: ISDestroy(&is);
733: } else {
734: PetscInt kk;
735: PCGASMSetSubdomains(subpc, sz, is, NULL);
736: for (kk=0; kk<sz; kk++) {
737: ISDestroy(&is[kk]);
738: }
739: PetscFree(is);
740: }
741: ASMLocalIDsArr[level] = NULL;
742: nASMBlocksArr[level] = 0;
743: PCGASMSetType(subpc, PC_GASM_BASIC);
744: } else {
745: PCSetType(subpc, PCSOR);
746: }
747: }
748: {
749: /* coarse grid */
750: KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
751: Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
752: PCMGGetSmoother(pc, lidx, &smoother);
753: KSPSetOperators(smoother, Lmat, Lmat);
754: KSPSetNormType(smoother, KSP_NORM_NONE);
755: KSPGetPC(smoother, &subpc);
756: PCSetType(subpc, PCBJACOBI);
757: PCSetUp(subpc);
758: PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
759: if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
760: KSPGetPC(k2[0],&pc2);
761: PCSetType(pc2, PCLU);
762: PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
763: KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
764: /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
765: * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
766: * view every subdomain as though they were different. */
767: ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
768: }
770: /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
771: PetscObjectOptionsBegin((PetscObject)pc);
772: PCSetFromOptions_MG(pc);
773: PetscOptionsEnd();
774: if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");
776: /* create cheby smoothers */
777: for (lidx = 1, level = pc_gamg->Nlevels-2;
778: lidx <= fine_level;
779: lidx++, level--) {
780: KSP smoother;
781: PetscBool flag,flag2;
782: PC subpc;
784: PCMGGetSmoother(pc, lidx, &smoother);
785: KSPGetPC(smoother, &subpc);
787: /* do my own cheby */
788: PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);
789: if (flag) {
790: PetscReal emax, emin;
791: PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);
792: PetscObjectTypeCompare((PetscObject)subpc, PCSOR, &flag2);
793: if ((flag||flag2) && emaxs[level] > 0.0) emax=emaxs[level]; /* the cached eigen estimate is only valid for a diagonal (Jacobi) PC, but let's accept SOR because it is close and safe (always lower) */
794: else { /* eigen estimate 'emax' -- this is done in cheby */
795: KSP eksp;
796: Mat Lmat = Aarr[level];
797: Vec bb, xx;
799: MatGetVecs(Lmat, &bb, 0);
800: MatGetVecs(Lmat, &xx, 0);
801: {
802: PetscRandom rctx;
803: PetscRandomCreate(comm,&rctx);
804: PetscRandomSetFromOptions(rctx);
805: VecSetRandom(bb,rctx);
806: PetscRandomDestroy(&rctx);
807: }
809: /* zeroing out BC rows -- needed for crazy matrices */
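          /*
             Rows with at most one stored entry are taken to be boundary-condition rows;
             the corresponding entries of the random right-hand side are zeroed so the
             eigenvalue estimate below is not polluted by them.
          */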
810: {
811: PetscInt Istart,Iend,ncols,jj,Ii;
812: PetscScalar zero = 0.0;
813: MatGetOwnershipRange(Lmat, &Istart, &Iend);
814: for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
815: MatGetRow(Lmat,Ii,&ncols,0,0);
816: if (ncols <= 1) {
817: VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);
818: }
819: MatRestoreRow(Lmat,Ii,&ncols,0,0);
820: }
821: VecAssemblyBegin(bb);
822: VecAssemblyEnd(bb);
823: }
825: KSPCreate(comm, &eksp);
826: KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);
827: KSPSetNormType(eksp, KSP_NORM_NONE);
828: KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
829: KSPAppendOptionsPrefix(eksp, "gamg_est_");
830: KSPSetFromOptions(eksp);
832: KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);
833: KSPSetOperators(eksp, Lmat, Lmat);
834: KSPSetComputeSingularValues(eksp,PETSC_TRUE);
836: /* set PC type to be same as smoother */
837: KSPSetPC(eksp, subpc);
839: /* solve - keep stuff out of logging */
840: PetscLogEventDeactivate(KSP_Solve);
841: PetscLogEventDeactivate(PC_Apply);
842: KSPSolve(eksp, bb, xx);
843: PetscLogEventActivate(KSP_Solve);
844: PetscLogEventActivate(PC_Apply);
846: KSPComputeExtremeSingularValues(eksp, &emax, &emin);
848: VecDestroy(&xx);
849: VecDestroy(&bb);
850: KSPDestroy(&eksp);
852: if (pc_gamg->verbose > 0) {
853: PetscInt N1, tt;
854: MatGetSize(Aarr[level], &N1, &tt);
855: PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
856: }
857: }
858: {
859: PetscInt N1, N0;
860: MatGetSize(Aarr[level], &N1, NULL);
861: MatGetSize(Aarr[level+1], &N0, NULL);
862: /* heuristic - is this crap? */
863: /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
864: emin = emax * pc_gamg->eigtarget[0];
865: emax *= pc_gamg->eigtarget[1];
866: }
867: KSPChebyshevSetEigenvalues(smoother, emax, emin);
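        /*
           With the default eigtarget of {0.05, 1.05}, an estimated emax of, say, 4.0
           yields the Chebyshev target interval [0.2, 4.2]: emin = 0.05*4.0 and
           emax = 1.05*4.0.
        */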
868: } /* setup cheby flag */
869: } /* non-coarse levels */
871: /* clean up */
872: for (level=1; level<pc_gamg->Nlevels; level++) {
873: MatDestroy(&Parr[level]);
874: MatDestroy(&Aarr[level]);
875: }
877: PCSetUp_MG(pc);
879: if (PETSC_TRUE) {
880: KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
881: PCMGGetSmoother(pc, 0, &smoother);
882: KSPSetType(smoother, KSPPREONLY);
883: }
884: } else {
885: KSP smoother;
886: if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__);
887: PCMGGetSmoother(pc, 0, &smoother);
888: KSPSetOperators(smoother, Aarr[0], Aarr[0]);
889: KSPSetType(smoother, KSPPREONLY);
890: PCSetUp_MG(pc);
891: }
892: return(0);
893: }
895: /* ------------------------------------------------------------------------- */
896: /*
897: PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
898: that was created with PCCreate_GAMG().
900: Input Parameter:
901: . pc - the preconditioner context
903: Application Interface Routine: PCDestroy()
904: */
907: PetscErrorCode PCDestroy_GAMG(PC pc)
908: {
910: PC_MG *mg = (PC_MG*)pc->data;
911: PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx;
914: PCReset_GAMG(pc);
915: if (pc_gamg->ops->destroy) {
916: (*pc_gamg->ops->destroy)(pc);
917: }
918: PetscFree(pc_gamg->ops);
919: PetscFree(pc_gamg->gamg_type_name);
920: PetscFree(pc_gamg);
921: PCDestroy_MG(pc);
922: return(0);
923: }
928: /*@
929: PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
930: processor reduction.
932: Not Collective on PC
934: Input Parameters:
935: + pc - the preconditioner context
- n - the limit (goal) on the number of equations per process on coarse grids
938: Options Database Key:
939: . -pc_gamg_process_eq_limit
941: Level: intermediate
943: Concepts: Unstructured multigrid preconditioner
945: .seealso: ()
946: @*/
947: PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
948: {
953: PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
954: return(0);
955: }
959: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
960: {
961: PC_MG *mg = (PC_MG*)pc->data;
962: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
965: if (n>0) pc_gamg->min_eq_proc = n;
966: return(0);
967: }
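/*
   A small usage sketch for the setters in this file ('configure_gamg' is a
   hypothetical helper; 'pc' is assumed to already be of type PCGAMG). The same
   settings are available on the command line as -pc_gamg_process_eq_limit 100,
   -pc_gamg_coarse_eq_limit 1000 and -pc_gamg_repartition true.
*/
#if 0
PetscErrorCode configure_gamg(PC pc)
{
  PetscErrorCode ierr;
  ierr = PCGAMGSetProcEqLim(pc, 100);CHKERRQ(ierr);             /* aim for >= 100 equations per process on coarse grids */
  ierr = PCGAMGSetCoarseEqLim(pc, 1000);CHKERRQ(ierr);          /* cap the coarsest grid near 1000 equations */
  ierr = PCGAMGSetRepartitioning(pc, PETSC_TRUE);CHKERRQ(ierr); /* repartition coarse grids for load balance */
  return 0;
}
#endif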
971: /*@
972: PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.
974: Collective on PC
976: Input Parameters:
977: + pc - the preconditioner context
- n - the maximum number of equations for the coarsest grid
980: Options Database Key:
981: . -pc_gamg_coarse_eq_limit
983: Level: intermediate
985: Concepts: Unstructured multigrid preconditioner
987: .seealso: ()
988: @*/
989: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
990: {
995: PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
996: return(0);
997: }
1001: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
1002: {
1003: PC_MG *mg = (PC_MG*)pc->data;
1004: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1007: if (n>0) pc_gamg->coarse_eq_limit = n;
1008: return(0);
1009: }
1013: /*@
1014: PCGAMGSetRepartitioning - Repartition the coarse grids
1016: Collective on PC
1018: Input Parameters:
1019: + pc - the preconditioner context
- n - PETSC_TRUE to repartition the coarse grids
1022: Options Database Key:
1023: . -pc_gamg_repartition
1025: Level: intermediate
1027: Concepts: Unstructured multigrid preconditioner
1029: .seealso: ()
1030: @*/
1031: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
1032: {
1037: PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
1038: return(0);
1039: }
1043: static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1044: {
1045: PC_MG *mg = (PC_MG*)pc->data;
1046: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1049: pc_gamg->repart = n;
1050: return(0);
1051: }
1055: /*@
1056: PCGAMGSetReuseProl - Reuse the prolongation operator
1058: Collective on PC
1060: Input Parameters:
1061: + pc - the preconditioner context
- n - PETSC_TRUE to reuse the prolongation operator
1064: Options Database Key:
1065: . -pc_gamg_reuse_interpolation
1067: Level: intermediate
1069: Concepts: Unstructured multigrid preconditioner
1071: .seealso: ()
1072: @*/
1073: PetscErrorCode PCGAMGSetReuseProl(PC pc, PetscBool n)
1074: {
1079: PetscTryMethod(pc,"PCGAMGSetReuseProl_C",(PC,PetscBool),(pc,n));
1080: return(0);
1081: }
1085: static PetscErrorCode PCGAMGSetReuseProl_GAMG(PC pc, PetscBool n)
1086: {
1087: PC_MG *mg = (PC_MG*)pc->data;
1088: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1091: pc_gamg->reuse_prol = n;
1092: return(0);
1093: }
1097: /*@
1098: PCGAMGSetUseASMAggs - Use the aggregates from the coarsening to define the subdomains of the GASM smoother
1100: Collective on PC
1102: Input Parameters:
1103: + pc - the preconditioner context
- n - PETSC_TRUE to use the aggregates as GASM subdomains
1106: Options Database Key:
1107: . -pc_gamg_use_agg_gasm
1109: Level: intermediate
1111: Concepts: Unstructured multigrid preconditioner
1113: .seealso: ()
1114: @*/
1115: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1116: {
1121: PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1122: return(0);
1123: }
1127: static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1128: {
1129: PC_MG *mg = (PC_MG*)pc->data;
1130: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1133: pc_gamg->use_aggs_in_gasm = n;
1134: return(0);
1135: }
1139: /*@
1140: PCGAMGSetNlevels - Set the maximum number of multigrid levels
1142: Not collective on PC
1144: Input Parameters:
1145: + pc - the preconditioner context
- n - the maximum number of levels
1148: Options Database Key:
1149: . -pc_mg_levels
1151: Level: intermediate
1153: Concepts: Unstructured multigrid preconditioner
1155: .seealso: ()
1156: @*/
1157: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1158: {
1163: PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1164: return(0);
1165: }
1169: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1170: {
1171: PC_MG *mg = (PC_MG*)pc->data;
1172: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1175: pc_gamg->Nlevels = n;
1176: return(0);
1177: }
1181: /*@
1182: PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1184: Not collective on PC
1186: Input Parameters:
1187: + pc - the preconditioner context
- n - the relative threshold; graph edges below it are dropped
1190: Options Database Key:
1191: . -pc_gamg_threshold
1193: Level: intermediate
1195: Concepts: Unstructured multigrid preconditioner
1197: .seealso: ()
1198: @*/
1199: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1200: {
1205: PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1206: return(0);
1207: }
1211: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1212: {
1213: PC_MG *mg = (PC_MG*)pc->data;
1214: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1217: pc_gamg->threshold = n;
1218: return(0);
1219: }
1223: /*@
1224: PCGAMGSetType - Set the coarsening method; calls the subtype's create method
1226: Collective on PC
1228: Input Parameters:
1229: + pc - the preconditioner context
- type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL
1232: Options Database Key:
1233: . -pc_gamg_type
1235: Level: intermediate
1237: Concepts: Unstructured multigrid preconditioner
1239: .seealso: ()
1240: @*/
1241: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1242: {
1247: PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1248: return(0);
1249: }
1253: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1254: {
1255: PetscErrorCode ierr,(*r)(PC);
1256: PC_MG *mg = (PC_MG*)pc->data;
1257: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1260: PetscFunctionListFind(GAMGList,type,&r);
1261: if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1262: if (pc_gamg->ops->destroy) {
1263: /* there was something here - kill it */
1264: (*pc_gamg->ops->destroy)(pc);
1265: PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1266: /* cleaning up common data in pc_gamg - this should disappear someday */
1267: pc_gamg->data_cell_cols = 0;
1268: pc_gamg->data_cell_rows = 0;
1269: pc_gamg->orig_data_cell_cols = 0;
1270: pc_gamg->orig_data_cell_rows = 0;
1271: if (pc_gamg->data_sz) {
1272: PetscFree(pc_gamg->data);
1273: pc_gamg->data_sz = 0;
1274: }
1275: else if (pc_gamg->data) {
1276: PetscFree(pc_gamg->data); /* can this happen ? */
1277: }
1278: }
1279: PetscFree(pc_gamg->gamg_type_name);
1280: PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1281: (*r)(pc);
1282: return(0);
1283: }
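/*
   Usage sketch (illustrative; 'pc' and 'ierr' assumed to exist in the caller):
   selecting the coarsening subtype programmatically, equivalent to
   -pc_gamg_type agg on the command line.
*/
#if 0
ierr = PCGAMGSetType(pc, PCGAMGAGG);CHKERRQ(ierr); /* smoothed aggregation; also the default set in PCCreate_GAMG() */
#endif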
1287: PetscErrorCode PCSetFromOptions_GAMG(PC pc)
1288: {
1290: PC_MG *mg = (PC_MG*)pc->data;
1291: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1292: PetscBool flag;
1293: PetscInt two = 2;
1294: MPI_Comm comm;
1297: PetscObjectGetComm((PetscObject)pc,&comm);
1298: PetscOptionsHead("GAMG options");
1299: {
1300: /* -pc_gamg_type */
1301: {
1302: char tname[256];
1303: PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);
1304: /* call PCCreateGAMG_XYZ */
1305: if (flag) {
1306: PCGAMGSetType(pc,tname);
1307: }
1308: }
1309: /* -pc_gamg_verbose */
1310: PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
1311: "none", pc_gamg->verbose,
1312: &pc_gamg->verbose, NULL);
1313: /* -pc_gamg_repartition */
1314: PetscOptionsBool("-pc_gamg_repartition",
1315: "Repartion coarse grids (false)",
1316: "PCGAMGRepartitioning",
1317: pc_gamg->repart,
1318: &pc_gamg->repart,
1319: &flag);
1320: /* -pc_gamg_reuse_interpolation */
1321: PetscOptionsBool("-pc_gamg_reuse_interpolation",
1322: "Reuse prolongation operator (true)",
1323: "PCGAMGReuseProl",
1324: pc_gamg->reuse_prol,
1325: &pc_gamg->reuse_prol,
1326: &flag);
1327: /* -pc_gamg_use_agg_gasm */
1328: PetscOptionsBool("-pc_gamg_use_agg_gasm",
1329: "Use aggregation agragates for GASM smoother (false)",
1330: "PCGAMGUseASMAggs",
1331: pc_gamg->use_aggs_in_gasm,
1332: &pc_gamg->use_aggs_in_gasm,
1333: &flag);
1334: /* -pc_gamg_process_eq_limit */
1335: PetscOptionsInt("-pc_gamg_process_eq_limit",
1336: "Limit (goal) on number of equations per process on coarse grids",
1337: "PCGAMGSetProcEqLim",
1338: pc_gamg->min_eq_proc,
1339: &pc_gamg->min_eq_proc,
1340: &flag);
1341: /* -pc_gamg_coarse_eq_limit */
1342: PetscOptionsInt("-pc_gamg_coarse_eq_limit",
1343: "Limit on number of equations for the coarse grid",
1344: "PCGAMGSetCoarseEqLim",
1345: pc_gamg->coarse_eq_limit,
1346: &pc_gamg->coarse_eq_limit,
1347: &flag);
1348: /* -pc_gamg_threshold */
1349: PetscOptionsReal("-pc_gamg_threshold",
1350: "Relative threshold to use for dropping edges in aggregation graph",
1351: "PCGAMGSetThreshold",
1352: pc_gamg->threshold,
1353: &pc_gamg->threshold,
1354: &flag);
1355: if (flag && pc_gamg->verbose) {
1356: PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1357: }
1358: /* -pc_gamg_eigtarget */
1359: PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);
1360: PetscOptionsInt("-pc_mg_levels",
1361: "Set number of MG levels",
1362: "PCGAMGSetNlevels",
1363: pc_gamg->Nlevels,
1364: &pc_gamg->Nlevels,
1365: &flag);
1367: /* set options for subtype */
1368: if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(pc);}
1369: }
1370: PetscOptionsTail();
1371: return(0);
1372: }
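/*
   Example command line exercising the options registered above (illustrative values):
     -pc_type gamg -pc_gamg_type agg -pc_gamg_threshold 0.02
     -pc_gamg_process_eq_limit 100 -pc_gamg_coarse_eq_limit 1000
     -pc_gamg_repartition true -pc_mg_levels 5 -pc_gamg_verbose 1
*/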
1374: /* -------------------------------------------------------------------------- */
1375: /*MC
1376: PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
1377: - This is the entry point to GAMG, registered in pcregis.c
1379: Options Database Keys:
1380: Multigrid options(inherited)
1381: + -pc_mg_cycles <v>: v or w (PCMGSetCycleType)
1382: . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1383: . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1384: - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade
1386: Level: intermediate
1388: Concepts: multigrid
1390: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1391: PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1392: PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1393: PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1394: PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1395: M*/
1399: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1400: {
1402: PC_GAMG *pc_gamg;
1403: PC_MG *mg;
1404: #if defined PETSC_GAMG_USE_LOG
1405: static long count = 0;
1406: #endif
1409: /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
1410: PCSetType(pc, PCMG); /* calls PCCreate_MG() and MGCreate_Private() */
1411: PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);
1413: /* create a supporting struct and attach it to pc */
1414: PetscNewLog(pc,&pc_gamg);
1415: mg = (PC_MG*)pc->data;
1416: mg->galerkin = 2; /* Use Galerkin, but it is computed externally */
1417: mg->innerctx = pc_gamg;
1419: PetscNewLog(pc,&pc_gamg->ops);
1421: pc_gamg->setup_count = 0;
1422: /* these should be in subctx but repartitioning needs simple arrays */
1423: pc_gamg->data_sz = 0;
1424: pc_gamg->data = 0;
1426: /* register AMG type */
1427: PCGAMGInitializePackage();
1429: /* overwrite the pointers of PCMG with the functions of the derived class PCGAMG */
1430: pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1431: pc->ops->setup = PCSetUp_GAMG;
1432: pc->ops->reset = PCReset_GAMG;
1433: pc->ops->destroy = PCDestroy_GAMG;
1435: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1436: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1437: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);
1438: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseProl_C",PCGAMGSetReuseProl_GAMG);
1439: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);
1440: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1441: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1442: PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1443: pc_gamg->repart = PETSC_FALSE;
1444: pc_gamg->reuse_prol = PETSC_FALSE;
1445: pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1446: pc_gamg->min_eq_proc = 50;
1447: pc_gamg->coarse_eq_limit = 800;
1448: pc_gamg->threshold = 0.;
1449: pc_gamg->Nlevels = GAMG_MAXLEVELS;
1450: pc_gamg->verbose = 0;
1451: pc_gamg->emax_id = -1;
1452: pc_gamg->eigtarget[0] = 0.05;
1453: pc_gamg->eigtarget[1] = 1.05;
1455: /* private events */
1456: #if defined PETSC_GAMG_USE_LOG
1457: if (count++ == 0) {
1458: PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1459: PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1460: /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1461: /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1462: /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1463: PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1464: PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1465: PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1466: PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1467: PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1468: PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1469: PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1470: PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1471: PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1472: PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1473: PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1474: PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);
1476: /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1477: /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1478: /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1479: /* create timer stages */
1480: #if defined GAMG_STAGES
1481: {
1482: char str[32];
1483: PetscInt lidx;
1484: sprintf(str,"MG Level %d (finest)",0);
1485: PetscLogStageRegister(str, &gamg_stages[0]);
1486: for (lidx=1; lidx<9; lidx++) {
1487: sprintf(str,"MG Level %d",lidx);
1488: PetscLogStageRegister(str, &gamg_stages[lidx]);
1489: }
1490: }
1491: #endif
1492: }
1493: #endif
1494: /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1495: PCGAMGSetType(pc,PCGAMGAGG);
1496: return(0);
1497: }
1501: /*@C
1502: PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1503: from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1504: when using static libraries.
1506: Level: developer
1508: .keywords: PC, PCGAMG, initialize, package
1509: .seealso: PetscInitialize()
1510: @*/
1511: PetscErrorCode PCGAMGInitializePackage(void)
1512: {
1516: if (PCGAMGPackageInitialized) return(0);
1517: PCGAMGPackageInitialized = PETSC_TRUE;
1518: PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1519: PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1520: PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);
1521: PetscRegisterFinalize(PCGAMGFinalizePackage);
1523: /* general events */
1524: #if defined PETSC_USE_LOG
1525: PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);
1526: PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1527: PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1528: PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1529: PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1530: PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1531: PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1532: #endif
1534: return(0);
1535: }
1539: /*@C
1540: PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is
1541: called from PetscFinalize().
1543: Level: developer
1545: .keywords: Petsc, destroy, package
1546: .seealso: PetscFinalize()
1547: @*/
1548: PetscErrorCode PCGAMGFinalizePackage(void)
1549: {
1553: PCGAMGPackageInitialized = PETSC_FALSE;
1554: PetscFunctionListDestroy(&GAMGList);
1555: return(0);
1556: }