Actual source code: gamg.c

petsc-3.6.4 2016-04-12
  1: /*
  2:  GAMG geometric-algebric multigrid PC - Mark Adams 2011
  3:  */
  4: #include <petsc/private/matimpl.h>
  5: #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
  6: #include <petsc/private/kspimpl.h>
  7: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

  9: #if defined PETSC_GAMG_USE_LOG
 10: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
 11: #endif

 13: #if defined PETSC_USE_LOG
 14: PetscLogEvent PC_GAMGGraph_AGG;
 15: PetscLogEvent PC_GAMGGraph_GEO;
 16: PetscLogEvent PC_GAMGCoarsen_AGG;
 17: PetscLogEvent PC_GAMGCoarsen_GEO;
 18: PetscLogEvent PC_GAMGProlongator_AGG;
 19: PetscLogEvent PC_GAMGProlongator_GEO;
 20: PetscLogEvent PC_GAMGOptProlongator_AGG;
 21: #endif

 23: #define GAMG_MAXLEVELS 30

 25: /* #define GAMG_STAGES */
 26: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
 27: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
 28: #endif

 30: static PetscFunctionList GAMGList = 0;
 31: static PetscBool PCGAMGPackageInitialized;

 33: /* ----------------------------------------------------------------------------- */
 36: PetscErrorCode PCReset_GAMG(PC pc)
 37: {
 39:   PC_MG          *mg      = (PC_MG*)pc->data;
 40:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

 43:   if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
 44:     PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
 45:     PetscFree(pc_gamg->data);
 46:   }
 47:   pc_gamg->data_sz = 0;
 48:   PetscFree(pc_gamg->orig_data);
 49:   return(0);
 50: }

 52: /* -------------------------------------------------------------------------- */
 53: /*
 54:    PCGAMGCreateLevel_GAMG: create the coarse operator with RAP, then repartition and/or
 55:      reduce the number of active processors.

 57:    Input Parameters:
 58:    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
 59:           'pc_gamg->data_sz' are changed via repartitioning/reduction.
 60:    . Amat_fine - matrix on this fine (k) level
 61:    . cr_bs - coarse block size
 62:    In/Output Parameter:
 63:    . a_P_inout - prolongation operator to the next level (k-->k-1)
 64:    . a_nactive_proc - number of active procs
 65:    Output Parameter:
 66:    . a_Amat_crs - coarse matrix that is created (k-1)
 67: */

 71: static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,
 72:                                   Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,
 73:                                   IS * Pcolumnperm)
 74: {
 75:   PetscErrorCode  ierr;
 76:   PC_MG           *mg         = (PC_MG*)pc->data;
 77:   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
 78:   Mat             Cmat,Pold=*a_P_inout;
 79:   MPI_Comm        comm;
 80:   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
 81:   PetscInt        ncrs_eq,ncrs,f_bs;

 84:   PetscObjectGetComm((PetscObject)Amat_fine,&comm);
 85:   MPI_Comm_rank(comm, &rank);
 86:   MPI_Comm_size(comm, &size);
 87:   MatGetBlockSize(Amat_fine, &f_bs);
 88:   MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);
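  /* Editorial note (not in the original source): MatPtAP forms the Galerkin coarse operator
     Cmat = Pold^T * Amat_fine * Pold; the 2.0 argument is a fill-ratio estimate hint for the product. */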

 90:   /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/
 91:   MatGetLocalSize(Cmat, &ncrs_eq, NULL);
 92:   if (pc_gamg->data_cell_rows>0) {
 93:     ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
 94:   } else {
 95:     PetscInt  bs;
 96:     MatGetBlockSize(Cmat, &bs);
 97:     ncrs = ncrs_eq/bs;
 98:   }

 100:   /* compute the number of PEs to make active, 'new_size'; it can be any integer in [1,P] */
101:   {
102:     PetscInt ncrs_eq_glob;
103:     MatGetSize(Cmat, &ncrs_eq_glob, NULL);
104:     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
 105:     if (new_size == 0) new_size = 1; /* not likely, possible? */
106:     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
107:   }
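  /* Editorial example (illustrative, not in the original source): with the default
     min_eq_proc = 50 and a coarse grid of ncrs_eq_glob = 10000 global equations,
     new_size = round(10000/50) = 200, then capped at the current nactive count. */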

109:   if (Pcolumnperm) *Pcolumnperm = NULL;

111:   if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
112:   else {
113:     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
114:     IS             is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

116:     nloc_old = ncrs_eq/cr_bs;
117:     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
118: #if defined PETSC_GAMG_USE_LOG
119:     PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
120: #endif
121:     /* make 'is_eq_newproc' */
122:     PetscMalloc1(size, &counts);
123:     if (pc_gamg->repart) {
 124:       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
125:       Mat adj;

 127:       PetscInfo3(pc,"Repartition: size (active): %D --> %D, neq = %D\n",*a_nactive_proc,new_size,ncrs_eq);

129:       /* get 'adj' */
130:       if (cr_bs == 1) {
131:         MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
132:       } else {
133:         /* make a scalar matrix to partition (no Stokes here) */
134:         Mat               tMat;
135:         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
136:         const PetscScalar *vals;
137:         const PetscInt    *idx;
138:         PetscInt          *d_nnz, *o_nnz, M, N;
139:         static PetscInt   llev = 0;
140:         MatType           mtype;

142:         PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);
143:         MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
144:         MatGetSize(Cmat, &M, &N);
145:         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
146:           MatGetRow(Cmat,Ii,&ncols,0,0);
147:           d_nnz[jj] = ncols/cr_bs;
148:           o_nnz[jj] = ncols/cr_bs;
149:           MatRestoreRow(Cmat,Ii,&ncols,0,0);
150:           if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
151:           if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
152:         }

154:         MatGetType(Amat_fine,&mtype);
155:         MatCreate(comm, &tMat);
156:         MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);
157:         MatSetType(tMat,mtype);
158:         MatSeqAIJSetPreallocation(tMat,0,d_nnz);
159:         MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
160:         PetscFree2(d_nnz,o_nnz);

162:         for (ii = Istart_crs; ii < Iend_crs; ii++) {
163:           PetscInt dest_row = ii/cr_bs;
164:           MatGetRow(Cmat,ii,&ncols,&idx,&vals);
165:           for (jj = 0; jj < ncols; jj++) {
166:             PetscInt    dest_col = idx[jj]/cr_bs;
167:             PetscScalar v        = 1.0;
168:             MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
169:           }
170:           MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
171:         }
172:         MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
173:         MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);

175:         if (llev++ == -1) {
176:           PetscViewer viewer; char fname[32];
177:           PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
178:           PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
179:           MatView(tMat, viewer);
180:           PetscViewerDestroy(&viewer);
181:         }

183:         MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);

185:         MatDestroy(&tMat);
186:       } /* create 'adj' */

188:       { /* partition: get newproc_idx */
189:         char            prefix[256];
190:         const char      *pcpre;
191:         const PetscInt  *is_idx;
192:         MatPartitioning mpart;
193:         IS              proc_is;
194:         PetscInt        targetPE;

196:         MatPartitioningCreate(comm, &mpart);
197:         MatPartitioningSetAdjacency(mpart, adj);
198:         PCGetOptionsPrefix(pc, &pcpre);
199:         PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
200:         PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
201:         MatPartitioningSetFromOptions(mpart);
202:         MatPartitioningSetNParts(mpart, new_size);
203:         MatPartitioningApply(mpart, &proc_is);
204:         MatPartitioningDestroy(&mpart);

206:         /* collect IS info */
207:         PetscMalloc1(ncrs_eq, &newproc_idx);
208:         ISGetIndices(proc_is, &is_idx);
209:         targetPE = 1; /* bring to "front" of machine */
210:         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
211:         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
212:           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
213:             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
214:           }
215:         }
216:         ISRestoreIndices(proc_is, &is_idx);
217:         ISDestroy(&proc_is);
218:       }
219:       MatDestroy(&adj);

221:       ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
222:       PetscFree(newproc_idx);
 223:     } else { /* simple aggregation of parts -- 'is_eq_newproc' */

225:       PetscInt rfactor,targetPE;
226:       /* find factor */
227:       if (new_size == 1) rfactor = size; /* easy */
228:       else {
229:         PetscReal best_fact = 0.;
230:         jj = -1;
231:         for (kk = 1 ; kk <= size ; kk++) {
232:           if (size%kk==0) { /* a candidate */
233:             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
234:             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
235:             if (fact > best_fact) {
236:               best_fact = fact; jj = kk;
237:             }
238:           }
239:         }
240:         if (jj != -1) rfactor = jj;
 241:         else rfactor = 1; /* can happen if size is prime */
242:       }
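      /* Editorial example (illustrative, not in the original source): with size = 12 and new_size = 5,
         the divisors kk of 12 give nactpe = 12, 6, 4, 3, 2, 1; the candidate closest to new_size is
         nactpe = 6 (fact = 5/6), so rfactor = 2 and the reduced new_size becomes 12/2 = 6. */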
243:       new_size = size/rfactor;

245:       if (new_size==nactive) {
246:         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
247:         PetscFree(counts);
248:         PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);
249:         return(0);
250:       }

252:       PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);
253:       targetPE = rank/rfactor;
254:       ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);
255:     } /* end simple 'is_eq_newproc' */

257:     /*
 258:      Create an index set from the is_eq_newproc partitioning to indicate the mapping TO the new global numbering
259:      */
260:     ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
261:     is_eq_num_prim = is_eq_num;
262:     /*
263:       Determine how many equations/vertices are assigned to each processor
264:      */
265:     ISPartitioningCount(is_eq_newproc, size, counts);
266:     ncrs_eq_new = counts[rank];
267:     ISDestroy(&is_eq_newproc);
 268:     ncrs_new = ncrs_eq_new/cr_bs; /* nodes */

270:     PetscFree(counts);
271: #if defined PETSC_GAMG_USE_LOG
272:     PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
273: #endif
 274:     /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
275:     {
276:     Vec            src_crd, dest_crd;
277:     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
278:     VecScatter     vecscat;
279:     PetscScalar    *array;
280:     IS isscat;

282:     /* move data (for primal equations only) */
283:     /* Create a vector to contain the newly ordered element information */
284:     VecCreate(comm, &dest_crd);
285:     VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);
286:     VecSetType(dest_crd,VECSTANDARD); /* this is needed! */
287:     /*
 288:      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
 289:      a block size of 'node_data_sz').  Note, ISs are expanded into equation space by 'cr_bs'.
290:      */
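    /* Editorial note (illustrative assumption, not in the original source): for 3D elasticity with
       the AGG type, data_cell_rows = bs = 3 (dof per node) and data_cell_cols = 6 (near-null-space
       vectors: 3 translations + 3 rotations), so node_data_sz = 18 scalars move with each node. */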
291:     PetscMalloc1(ncrs*node_data_sz, &tidx);
292:     ISGetIndices(is_eq_num_prim, &idx);
293:     for (ii=0,jj=0; ii<ncrs; ii++) {
294:       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
295:       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
296:     }
297:     ISRestoreIndices(is_eq_num_prim, &idx);
298:     ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);
299:     PetscFree(tidx);
300:     /*
301:      Create a vector to contain the original vertex information for each element
302:      */
303:     VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);
304:     for (jj=0; jj<ndata_cols; jj++) {
305:       const PetscInt stride0=ncrs*pc_gamg->data_cell_rows;
306:       for (ii=0; ii<ncrs; ii++) {
307:         for (kk=0; kk<ndata_rows; kk++) {
308:           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
309:           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
310:           VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
311:         }
312:       }
313:     }
314:     VecAssemblyBegin(src_crd);
315:     VecAssemblyEnd(src_crd);
316:     /*
317:       Scatter the element vertex information (still in the original vertex ordering)
318:       to the correct processor
319:     */
320:     VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
321:     ISDestroy(&isscat);
322:     VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
323:     VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
324:     VecScatterDestroy(&vecscat);
325:     VecDestroy(&src_crd);
326:     /*
 327:       Put the element vertex data into the new allocation of pc_gamg->data
328:     */
329:     PetscFree(pc_gamg->data);
330:     PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);

332:     pc_gamg->data_sz = node_data_sz*ncrs_new;
333:     strideNew        = ncrs_new*ndata_rows;

335:     VecGetArray(dest_crd, &array);
336:     for (jj=0; jj<ndata_cols; jj++) {
337:       for (ii=0; ii<ncrs_new; ii++) {
338:         for (kk=0; kk<ndata_rows; kk++) {
339:           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
340:           pc_gamg->data[ix] = PetscRealPart(array[jx]);
341:         }
342:       }
343:     }
344:     VecRestoreArray(dest_crd, &array);
345:     VecDestroy(&dest_crd);
346:     }
347:     /* move A and P (columns) with new layout */
348: #if defined PETSC_GAMG_USE_LOG
349:     PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
350: #endif

352:     /*
353:       Invert for MatGetSubMatrix
354:     */
355:     ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
356:     ISSort(new_eq_indices); /* is this needed? */
357:     ISSetBlockSize(new_eq_indices, cr_bs);
358:     if (is_eq_num != is_eq_num_prim) {
359:       ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
360:     }
361:     if (Pcolumnperm) {
362:       PetscObjectReference((PetscObject)new_eq_indices);
363:       *Pcolumnperm = new_eq_indices;
364:     }
365:     ISDestroy(&is_eq_num);
366: #if defined PETSC_GAMG_USE_LOG
367:     PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
368:     PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
369: #endif
370:     /* 'a_Amat_crs' output */
371:     {
372:       Mat mat;
373:       MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
374:       *a_Amat_crs = mat;

376:       if (!PETSC_TRUE) {
377:         PetscInt cbs, rbs;
378:         MatGetBlockSizes(Cmat, &rbs, &cbs);
379:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
380:         MatGetBlockSizes(mat, &rbs, &cbs);
381:         PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);
382:       }
383:     }
384:     MatDestroy(&Cmat);

386: #if defined PETSC_GAMG_USE_LOG
387:     PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
388: #endif
389:     /* prolongator */
390:     {
391:       IS       findices;
392:       PetscInt Istart,Iend;
393:       Mat      Pnew;
394:       MatGetOwnershipRange(Pold, &Istart, &Iend);
395: #if defined PETSC_GAMG_USE_LOG
396:       PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
397: #endif
398:       ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
399:       ISSetBlockSize(findices,f_bs);
400:       MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
401:       ISDestroy(&findices);

403:       if (!PETSC_TRUE) {
404:         PetscInt cbs, rbs;
405:         MatGetBlockSizes(Pold, &rbs, &cbs);
406:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
407:         MatGetBlockSizes(Pnew, &rbs, &cbs);
408:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
409:       }
410: #if defined PETSC_GAMG_USE_LOG
411:       PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
412: #endif
413:       MatDestroy(a_P_inout);

415:       /* output - repartitioned */
416:       *a_P_inout = Pnew;
417:     }
418:     ISDestroy(&new_eq_indices);

420:     *a_nactive_proc = new_size; /* output */
421:   }

 423:   /* output matrix data */
424:   if (!PETSC_TRUE) {
425:     PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
426:     if (llev==0) {
427:       sprintf(fname,"Cmat_%d.m",llev++);
428:       PetscViewerASCIIOpen(comm,fname,&viewer);
429:       PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
430:       MatView(Amat_fine, viewer);
431:       PetscViewerDestroy(&viewer);
432:     }
433:     sprintf(fname,"Cmat_%d.m",llev++);
434:     PetscViewerASCIIOpen(comm,fname,&viewer);
435:     PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
436:     MatView(Cmat, viewer);
437:     PetscViewerDestroy(&viewer);
438:   }
439:   return(0);
440: }

442: /* -------------------------------------------------------------------------- */
443: /*
444:    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
445:                     by setting data structures and options.

447:    Input Parameter:
448: .  pc - the preconditioner context

450: */
453: PetscErrorCode PCSetUp_GAMG(PC pc)
454: {
456:   PC_MG          *mg      = (PC_MG*)pc->data;
457:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
458:   Mat            Pmat     = pc->pmat;
459:   PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
460:   MPI_Comm       comm;
461:   PetscMPIInt    rank,size,nactivepe;
462:   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
463:   PetscReal      emaxs[GAMG_MAXLEVELS];
464:   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
465:   PetscLogDouble nnz0=0.,nnztot=0.;
466:   MatInfo        info;

469:   PetscObjectGetComm((PetscObject)pc,&comm);
470:   MPI_Comm_rank(comm,&rank);
471:   MPI_Comm_size(comm,&size);

473:   if (pc_gamg->setup_count++ > 0) {
474:     if ((PetscBool)(!pc_gamg->reuse_prol)) {
475:       /* reset everything */
476:       PCReset_MG(pc);
477:       pc->setupcalled = 0;
478:     } else {
479:       PC_MG_Levels **mglevels = mg->levels;
480:       /* just do Galerkin grids */
481:       Mat          B,dA,dB;

 483:       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
484:       if (pc_gamg->Nlevels > 1) {
485:         /* currently only handle case where mat and pmat are the same on coarser levels */
486:         KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);
487:         /* (re)set to get dirty flag */
488:         KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);

490:         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
 491:           /* the first time through, the matrix structure has changed from repartitioning */
492:           if (pc_gamg->setup_count==2) {
493:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
494:             MatDestroy(&mglevels[level]->A);

496:             mglevels[level]->A = B;
497:           } else {
498:             KSPGetOperators(mglevels[level]->smoothd,NULL,&B);
499:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
500:           }
501:           KSPSetOperators(mglevels[level]->smoothd,B,B);
502:           dB   = B;
503:         }
504:       }
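      /* Editorial note (not in the original source): on this reuse path only the Galerkin triple
         products above are refreshed with the cached prolongators; the grid hierarchy itself is
         not rebuilt. */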

506:       PCSetUp_MG(pc);

508:       return(0);
509:     }
510:   }

512:   if (!pc_gamg->data) {
513:     if (pc_gamg->orig_data) {
514:       MatGetBlockSize(Pmat, &bs);
515:       MatGetLocalSize(Pmat, &qq, NULL);

517:       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
518:       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
519:       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

521:       PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);
522:       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
523:     } else {
524:       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
525:       pc_gamg->ops->createdefaultdata(pc,Pmat);
526:     }
527:   }

529:   /* cache original data for reuse */
530:   if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
531:     PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);
532:     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
533:     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
534:     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
535:   }

537:   /* get basic dims */
538:   MatGetBlockSize(Pmat, &bs);
539:   MatGetSize(Pmat, &M, &qq);

541:   MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info); /* global reduction */
542:   nnz0   = info.nz_used;
543:   nnztot = info.nz_used;
544:   PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
545:                     0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
546:                     (int)(nnz0/(PetscReal)M+0.5),size);

549:   /* Get A_i and R_i */
550:   for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
551:        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit);
552:        level++) {
553:     pc_gamg->current_level = level;
554:     level1 = level + 1;
555: #if defined PETSC_GAMG_USE_LOG
556:     PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
557: #if (defined GAMG_STAGES)
558:     PetscLogStagePush(gamg_stages[level]);
559: #endif
560: #endif
561:     { /* construct prolongator */
562:       Mat              Gmat;
563:       PetscCoarsenData *agg_lists;
564:       Mat              Prol11;

566:       pc_gamg->ops->graph(pc,Aarr[level], &Gmat);
567:       pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
568:       pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);

570:       /* could have failed to create new level */
571:       if (Prol11) {
572:         /* get new block size of coarse matrices */
573:         MatGetBlockSizes(Prol11, NULL, &bs);

575:         if (pc_gamg->ops->optprolongator) {
576:           /* smooth */
577:           pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);
578:         }

580:         Parr[level1] = Prol11;
581:       } else Parr[level1] = NULL;

 583:       if (pc_gamg->use_aggs_in_gasm && Prol11) { /* guard: Prol11 may be NULL if coarsening failed */
584:         PetscInt bs;
585:         MatGetBlockSizes(Prol11, &bs, NULL);
586:         PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
587:       }

589:       MatDestroy(&Gmat);
590:       PetscCDDestroy(agg_lists);
591:     } /* construct prolongator scope */
592: #if defined PETSC_GAMG_USE_LOG
593:     PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
594: #endif
595:     /* cache eigen estimate */
596:     if (pc_gamg->emax_id != -1) {
597:       PetscBool flag;
598:       PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);
599:       if (!flag) emaxs[level] = -1.;
600:     } else emaxs[level] = -1.;
601:     if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
602:     if (!Parr[level1]) {
603:        PetscInfo1(pc,"Stop gridding, level %D\n",level);
604: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
605:       PetscLogStagePop();
606: #endif
607:       break;
608:     }
609: #if defined PETSC_GAMG_USE_LOG
610:     PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
611: #endif

613:     pc_gamg->ops->createlevel(pc, Aarr[level], bs,&Parr[level1], &Aarr[level1], &nactivepe, NULL);

615: #if defined PETSC_GAMG_USE_LOG
616:     PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
617: #endif
618:     MatGetSize(Aarr[level1], &M, &qq);

620:     MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
621:     nnztot += info.nz_used;
622:     PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);

624: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
625:     PetscLogStagePop();
626: #endif
627:     /* stop if one node or one proc -- could pull back for singular problems */
628:     if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
629:        PetscInfo2(pc,"HARD stop of coarsening on level %D.  Grid too small: %D block nodes\n",level,M/bs);
630:       level++;
631:       break;
632:     }
633:   } /* levels */
634:   PetscFree(pc_gamg->data);

636:   PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);
637:   pc_gamg->Nlevels = level + 1;
638:   fine_level       = level;
639:   PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);

641:   /* simple setup */
642:   if (!PETSC_TRUE) {
643:     PC_MG_Levels **mglevels = mg->levels;
644:     for (lidx=0,level=pc_gamg->Nlevels-1;
645:          lidx<fine_level;
646:          lidx++, level--) {
647:       PCMGSetInterpolation(pc, lidx+1, Parr[level]);
648:       KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);
649:       MatDestroy(&Parr[level]);
650:       MatDestroy(&Aarr[level]);
651:     }
652:     KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);

654:     PCSetUp_MG(pc);
655:   } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
656:     /* set default smoothers & set operators */
657:     for (lidx = 1, level = pc_gamg->Nlevels-2;
658:          lidx <= fine_level;
659:          lidx++, level--) {
660:       KSP smoother;
661:       PC  subpc;

663:       PCMGGetSmoother(pc, lidx, &smoother);
664:       KSPGetPC(smoother, &subpc);

666:       KSPSetNormType(smoother, KSP_NORM_NONE);
667:       /* set ops */
668:       KSPSetOperators(smoother, Aarr[level], Aarr[level]);
669:       PCMGSetInterpolation(pc, lidx, Parr[level+1]);

671:       /* set defaults */
672:       KSPSetType(smoother, KSPCHEBYSHEV);

674:       /* set blocks for GASM smoother that uses the 'aggregates' */
675:       if (pc_gamg->use_aggs_in_gasm) {
676:         PetscInt sz;
677:         IS       *is;

679:         sz   = nASMBlocksArr[level];
680:         is   = ASMLocalIDsArr[level];
681:         PCSetType(subpc, PCGASM);
682:         PCGASMSetOverlap(subpc, 0);
683:         if (sz==0) {
684:           IS       is;
685:           PetscInt my0,kk;
686:           MatGetOwnershipRange(Aarr[level], &my0, &kk);
687:           ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);
688:           PCGASMSetSubdomains(subpc, 1, &is, NULL);
689:           ISDestroy(&is);
690:         } else {
691:           PetscInt kk;
692:           PCGASMSetSubdomains(subpc, sz, is, NULL);
693:           for (kk=0; kk<sz; kk++) {
694:             ISDestroy(&is[kk]);
695:           }
696:           PetscFree(is);
697:         }
698:         ASMLocalIDsArr[level] = NULL;
699:         nASMBlocksArr[level]  = 0;
700:         PCGASMSetType(subpc, PC_GASM_BASIC);
701:       } else {
702:         PCSetType(subpc, PCSOR);
703:       }
704:     }
705:     {
706:       /* coarse grid */
707:       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
708:       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
709:       PCMGGetSmoother(pc, lidx, &smoother);
710:       KSPSetOperators(smoother, Lmat, Lmat);
711:       KSPSetNormType(smoother, KSP_NORM_NONE);
712:       KSPGetPC(smoother, &subpc);
713:       PCSetType(subpc, PCBJACOBI);
714:       PCSetUp(subpc);
715:       PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
716:       if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
717:       KSPGetPC(k2[0],&pc2);
718:       PCSetType(pc2, PCLU);
719:       PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
720:       KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
721:       /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
722:        * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
723:        * view every subdomain as though they were different. */
724:       ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
725:     }
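    /* Editorial note (not in the original source): the coarse solve is thus block Jacobi with an LU
       factorization in each block; since the processor reduction typically leaves all coarse
       equations on one process (per the comment above, all but one process have zero dofs), this
       acts as a direct coarse-grid solve. */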

727:     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
728:     PetscObjectOptionsBegin((PetscObject)pc);
729:     PCSetFromOptions_MG(PetscOptionsObject,pc);
730:     PetscOptionsEnd();
731:     if (!mg->galerkin) SETERRQ(comm,PETSC_ERR_USER,"PCGAMG must use Galerkin for coarse operators.");
732:     if (mg->galerkin == 1) mg->galerkin = 2;

734:     /* create cheby smoothers */
735:     for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
736:       KSP       smoother;
737:       PetscBool flag,flag2;
738:       PC        subpc;

740:       PCMGGetSmoother(pc, lidx, &smoother);
741:       KSPGetPC(smoother, &subpc);

743:       /* do my own cheby */
744:       PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);
745:       if (0 && flag) {
746:         PetscReal emax, emin;
747:         PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);
748:         PetscObjectTypeCompare((PetscObject)subpc, PCSOR, &flag2);
 749:         /* eigen estimate only works for a diagonal PC, but let's accept SOR because it is close and safe (always lower) */
750:         if ((flag||flag2) && (emax=emaxs[level]) > 0.0) {
751:           PetscInt N1, N0;
752:           emax=emaxs[level];
753:           MatGetSize(Aarr[level], &N1, NULL);
754:           MatGetSize(Aarr[level+1], &N0, NULL);
755:           emin  = emax * pc_gamg->eigtarget[0];
756:           emax *= pc_gamg->eigtarget[1];
757:           KSPChebyshevSetEigenvalues(smoother, emax, emin);
758:         }
 759:       } /* setup Chebyshev flag */
760:     } /* non-coarse levels */
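    /* Editorial note (based on the defaults set in PCCreate_GAMG below): with eigtarget = {0.05, 1.05}
       and a cached estimate emax, Chebyshev is tuned to the interval [0.05*emax, 1.05*emax], i.e. it
       targets the upper part of the spectrum, as usual for a multigrid smoother. */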

762:     /* clean up */
763:     for (level=1; level<pc_gamg->Nlevels; level++) {
764:       MatDestroy(&Parr[level]);
765:       MatDestroy(&Aarr[level]);
766:     }

768:     PCSetUp_MG(pc);
769:   } else {
770:     KSP smoother;
771:     PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");
772:     PCMGGetSmoother(pc, 0, &smoother);
773:     KSPSetOperators(smoother, Aarr[0], Aarr[0]);
774:     KSPSetType(smoother, KSPPREONLY);
775:     PCSetUp_MG(pc);
776:   }
777:   return(0);
778: }

780: /* ------------------------------------------------------------------------- */
781: /*
782:  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
783:    that was created with PCCreate_GAMG().

785:    Input Parameter:
786: .  pc - the preconditioner context

788:    Application Interface Routine: PCDestroy()
789: */
792: PetscErrorCode PCDestroy_GAMG(PC pc)
793: {
795:   PC_MG          *mg     = (PC_MG*)pc->data;
796:   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

799:   PCReset_GAMG(pc);
800:   if (pc_gamg->ops->destroy) {
801:     (*pc_gamg->ops->destroy)(pc);
802:   }
803:   PetscFree(pc_gamg->ops);
804:   PetscFree(pc_gamg->gamg_type_name);
805:   PetscFree(pc_gamg);
806:   PCDestroy_MG(pc);
807:   return(0);
808: }


813: /*@
 814:    PCGAMGSetProcEqLim - Set the number of equations to aim for per process on the coarse grids via processor reduction.

816:    Logically Collective on PC

818:    Input Parameters:
819: +  pc - the preconditioner context
820: -  n - the number of equations


823:    Options Database Key:
824: .  -pc_gamg_process_eq_limit <limit>

826:    Level: intermediate

828:    Concepts: Unstructured multigrid preconditioner

830: .seealso: PCGAMGSetCoarseEqLim()
831: @*/
832: PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
833: {

838:   PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
839:   return(0);
840: }
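/* Editorial usage sketch (not part of the original source; 'ksp' is an assumed, already-created KSP):

     PC pc;
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);
     PCGAMGSetProcEqLim(pc,1000);   -- aim for at least 1000 equations per process on coarse grids

   or, equivalently, on the command line: -pc_gamg_process_eq_limit 1000
*/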

844: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
845: {
846:   PC_MG   *mg      = (PC_MG*)pc->data;
847:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

850:   if (n>0) pc_gamg->min_eq_proc = n;
851:   return(0);
852: }

856: /*@
 857:    PCGAMGSetCoarseEqLim - Set the maximum number of equations to aim for on the coarsest grid.

859:  Collective on PC

861:    Input Parameters:
862: +  pc - the preconditioner context
863: -  n - maximum number of equations to aim for

865:    Options Database Key:
866: .  -pc_gamg_coarse_eq_limit <limit>

868:    Level: intermediate

870:    Concepts: Unstructured multigrid preconditioner

872: .seealso: PCGAMGSetProcEqLim()
873: @*/
874: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
875: {

880:   PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
881:   return(0);
882: }

886: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
887: {
888:   PC_MG   *mg      = (PC_MG*)pc->data;
889:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

892:   if (n>0) pc_gamg->coarse_eq_limit = n;
893:   return(0);
894: }

898: /*@
899:    PCGAMGSetRepartitioning - Repartition the coarse grids

901:    Collective on PC

903:    Input Parameters:
904: +  pc - the preconditioner context
905: -  n - PETSC_TRUE or PETSC_FALSE

907:    Options Database Key:
908: .  -pc_gamg_repartition <true,false>

910:    Level: intermediate

912:    Concepts: Unstructured multigrid preconditioner

 914: .seealso: PCGAMG, PCGAMGSetProcEqLim(), PCGAMGSetCoarseEqLim()
915: @*/
916: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
917: {

922:   PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
923:   return(0);
924: }

928: static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
929: {
930:   PC_MG   *mg      = (PC_MG*)pc->data;
931:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

934:   pc_gamg->repart = n;
935:   return(0);
936: }

940: /*@
941:    PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner

943:    Collective on PC

945:    Input Parameters:
946: +  pc - the preconditioner context
947: -  n - PETSC_TRUE or PETSC_FALSE

949:    Options Database Key:
950: .  -pc_gamg_reuse_interpolation <true,false>

952:    Level: intermediate

954:    Concepts: Unstructured multigrid preconditioner

 956: .seealso: PCGAMG, PCGAMGSetRepartitioning()
957: @*/
958: PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
959: {

964:   PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));
965:   return(0);
966: }

970: static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
971: {
972:   PC_MG   *mg      = (PC_MG*)pc->data;
973:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

976:   pc_gamg->reuse_prol = n;
977:   return(0);
978: }

982: /*@
 983:    PCGAMGSetUseASMAggs - Use the aggregates from the coarsening phase to define the subdomains (blocks) for the GASM smoother on each level

985:    Collective on PC

987:    Input Parameters:
 988: +  pc - the preconditioner context
 989: -  n - PETSC_TRUE to use the aggregates

991:    Options Database Key:
992: .  -pc_gamg_use_agg_gasm

994:    Level: intermediate

996:    Concepts: Unstructured multigrid preconditioner

 998: .seealso: PCGAMG, PCGASM
999: @*/
1000: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1001: {

1006:   PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1007:   return(0);
1008: }

1012: static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1013: {
1014:   PC_MG   *mg      = (PC_MG*)pc->data;
1015:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1018:   pc_gamg->use_aggs_in_gasm = n;
1019:   return(0);
1020: }

1024: /*@
1025:    PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use

1027:    Not collective on PC

1029:    Input Parameters:
1030: +  pc - the preconditioner
1031: -  n - the maximum number of levels to use

1033:    Options Database Key:
1034: .  -pc_mg_levels

1036:    Level: intermediate

1038:    Concepts: Unstructured multigrid preconditioner

1040: .seealso: PCGAMG, PCMGSetLevels()
1041: @*/
1042: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1043: {

1048:   PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1049:   return(0);
1050: }

1054: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1055: {
1056:   PC_MG   *mg      = (PC_MG*)pc->data;
1057:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1060:   pc_gamg->Nlevels = n;
1061:   return(0);
1062: }

1066: /*@
1067:    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

1069:    Not collective on PC

1071:    Input Parameters:
1072: +  pc - the preconditioner context
1073: -  threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph

1075:    Options Database Key:
1076: .  -pc_gamg_threshold <threshold>

1078:    Level: intermediate

1080:    Concepts: Unstructured multigrid preconditioner

1082: .seealso: PCGAMG, PCGAMGSetType()
1083: @*/
1084: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1085: {

1090:   PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1091:   return(0);
1092: }

1096: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1097: {
1098:   PC_MG   *mg      = (PC_MG*)pc->data;
1099:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1102:   pc_gamg->threshold = n;
1103:   return(0);
1104: }

1108: /*@
1109:    PCGAMGSetType - Set solution method

1111:    Collective on PC

1113:    Input Parameters:
1114: +  pc - the preconditioner context
1115: -  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

1117:    Options Database Key:
1118: .  -pc_gamg_type <agg,geo,classical>

1120:    Level: intermediate

1122:    Concepts: Unstructured multigrid preconditioner

1124: .seealso: PCGAMGGetType(), PCGAMG
1125: @*/
1126: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1127: {

1132:   PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1133:   return(0);
1134: }

1138: /*@
1139:    PCGAMGGetType - Get solution method

1141:    Collective on PC

1143:    Input Parameter:
1144: .  pc - the preconditioner context

1146:    Output Parameter:
1147: .  type - the type of algorithm used

1149:    Level: intermediate

1151:    Concepts: Unstructured multigrid preconditioner

1153: .seealso: PCGAMGSetType(), PCGAMGType
1154: @*/
1155: PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
1156: {

1161:   PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));
1162:   return(0);
1163: }

1167: static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
1168: {
1169:   PC_MG          *mg      = (PC_MG*)pc->data;
1170:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1173:   *type = pc_gamg->type;
1174:   return(0);
1175: }

1179: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1180: {
1181:   PetscErrorCode ierr,(*r)(PC);
1182:   PC_MG          *mg      = (PC_MG*)pc->data;
1183:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1186:   pc_gamg->type = type;
1187:   PetscFunctionListFind(GAMGList,type,&r);
1188:   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1189:   if (pc_gamg->ops->destroy) {
1190:     (*pc_gamg->ops->destroy)(pc);
1191:     PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1192:     pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
1193:     /* cleaning up common data in pc_gamg - this should disappear someday */
1194:     pc_gamg->data_cell_cols = 0;
1195:     pc_gamg->data_cell_rows = 0;
1196:     pc_gamg->orig_data_cell_cols = 0;
1197:     pc_gamg->orig_data_cell_rows = 0;
1198:     PetscFree(pc_gamg->data);
1199:     pc_gamg->data_sz = 0;
1200:   }
1201:   PetscFree(pc_gamg->gamg_type_name);
1202:   PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1203:   (*r)(pc);
1204:   return(0);
1205: }

1209: static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
1210: {
1212:   PC_MG          *mg      = (PC_MG*)pc->data;
1213:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1216:   PetscViewerASCIIPrintf(viewer,"    GAMG specific options\n");
1217:   PetscViewerASCIIPrintf(viewer,"      Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);
1218:   if (pc_gamg->ops->view) {
1219:     (*pc_gamg->ops->view)(pc,viewer);
1220:   }
1221:   return(0);
1222: }

1226: PetscErrorCode PCSetFromOptions_GAMG(PetscOptions *PetscOptionsObject,PC pc)
1227: {
1229:   PC_MG          *mg      = (PC_MG*)pc->data;
1230:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1231:   PetscBool      flag;
1232:   PetscInt       two   = 2;
1233:   MPI_Comm       comm;

1236:   PetscObjectGetComm((PetscObject)pc,&comm);
1237:   PetscOptionsHead(PetscOptionsObject,"GAMG options");
1238:   {
1239:     char tname[256];
1240:     PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);
1241:     if (flag) {
1242:       PCGAMGSetType(pc,tname);
1243:     }
1244:     PetscOptionsBool("-pc_gamg_repartition","Repartion coarse grids","PCGAMGRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);
1245:     PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);
1246:     PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation agragates for GASM smoother","PCGAMGUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);
1247:     PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);
1248:     PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);
1249:     PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);
1250:     PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);
1251:     PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);

1253:     /* set options for subtype */
1254:     if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);}
1255:   }
1256:   PetscOptionsTail();
1257:   return(0);
1258: }

1260: /* -------------------------------------------------------------------------- */
1261: /*MC
1262:      PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

1264:    Options Database Keys:
1265:    Multigrid options(inherited)
1266: +  -pc_mg_cycles <v>: v or w (PCMGSetCycleType())
1267: .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1268: .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1269: -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade


1272:   Notes: In order to obtain good performance for PCGAMG for vector-valued problems you must
1273: $       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
1274: $       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
1275: $       See the Users Manual Chapter 4 for more details

1277:   Level: intermediate

1279:   Concepts: algebraic multigrid

1281: .seealso:  PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(), 
1282:            PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
1283: M*/
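/* Editorial usage sketch (not part of the original source; illustrative only -- 'A' is an assumed
   assembled AIJ matrix for a 3D elasticity problem, 'coords' its nloc local nodal coordinates, and
   'b','x' assumed vectors; MatSetBlockSize() should be called before the matrix is assembled):

     KSP ksp;
     PC  pc;
     KSPCreate(PETSC_COMM_WORLD,&ksp);
     KSPSetOperators(ksp,A,A);
     KSPGetPC(ksp,&pc);
     PCSetType(pc,PCGAMG);
     MatSetBlockSize(A,3);                  -- 3 degrees of freedom per grid point
     PCSetCoordinates(pc,3,nloc,coords);    -- builds the elasticity near null space
     KSPSetFromOptions(ksp);
     KSPSolve(ksp,b,x);
*/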

1287: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1288: {
1290:   PC_GAMG        *pc_gamg;
1291:   PC_MG          *mg;

1294:   /* register AMG type */
1295:   PCGAMGInitializePackage();

1297:   /* PCGAMG is a subclass of PCMG; initialize pc as PCMG */
1298:   PCSetType(pc, PCMG);
1299:   PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);

1301:   /* create a supporting struct and attach it to pc */
1302:   PetscNewLog(pc,&pc_gamg);
1303:   mg           = (PC_MG*)pc->data;
1304:   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally from PCMG by GAMG code */
1305:   mg->innerctx = pc_gamg;

1307:   PetscNewLog(pc,&pc_gamg->ops);

1309:   pc_gamg->setup_count = 0;
1310:   /* these should be in subctx but repartitioning needs simple arrays */
1311:   pc_gamg->data_sz = 0;
1312:   pc_gamg->data    = 0;

1314:   /* overwrite the function pointers of PCMG with those of the derived class PCGAMG */
1315:   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1316:   pc->ops->setup          = PCSetUp_GAMG;
1317:   pc->ops->reset          = PCReset_GAMG;
1318:   pc->ops->destroy        = PCDestroy_GAMG;
1319:   mg->view                = PCView_GAMG;

1321:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1322:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1323:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);
1324:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);
1325:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);
1326:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1327:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1328:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);
1329:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1330:   pc_gamg->repart           = PETSC_FALSE;
1331:   pc_gamg->reuse_prol       = PETSC_FALSE;
1332:   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1333:   pc_gamg->min_eq_proc      = 50;
1334:   pc_gamg->coarse_eq_limit  = 50;
1335:   pc_gamg->threshold        = 0.;
1336:   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1337:   pc_gamg->emax_id          = -1;
1338:   pc_gamg->current_level    = 0; /* don't need to init really */
1339:   pc_gamg->eigtarget[0]     = 0.05;
1340:   pc_gamg->eigtarget[1]     = 1.05;
1341:   pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

1343:   /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
1344:   PCGAMGSetType(pc,PCGAMGAGG);
1345:   return(0);
1346: }

1350: /*@C
1351:  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1352:     from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1353:     when using static libraries.

1355:  Level: developer

1357:  .keywords: PC, PCGAMG, initialize, package
1358:  .seealso: PetscInitialize()
1359: @*/
1360: PetscErrorCode PCGAMGInitializePackage(void)
1361: {

1365:   if (PCGAMGPackageInitialized) return(0);
1366:   PCGAMGPackageInitialized = PETSC_TRUE;
1367:   PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1368:   PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1369:   PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);
1370:   PetscRegisterFinalize(PCGAMGFinalizePackage);

1372:   /* general events */
1373:   PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);
1374:   PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);
1375:   PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1376:   PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1377:   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1378:   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1379:   PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);

1381: #if defined PETSC_GAMG_USE_LOG
1382:   PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1383:   PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1384:   /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1385:   /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1386:   /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1387:   PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1388:   PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1389:   PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1390:   PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1391:   PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1392:   PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1393:   PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1394:   PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1395:   PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1396:   PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1397:   PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1398:   PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);

1400:   /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1401:   /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1402:   /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1403:   /* create timer stages */
1404: #if defined GAMG_STAGES
1405:   {
1406:     char     str[32];
1407:     PetscInt lidx;
1408:     sprintf(str,"MG Level %d (finest)",0);
1409:     PetscLogStageRegister(str, &gamg_stages[0]);
1410:     for (lidx=1; lidx<9; lidx++) {
1411:       sprintf(str,"MG Level %d",lidx);
1412:       PetscLogStageRegister(str, &gamg_stages[lidx]);
1413:     }
1414:   }
1415: #endif
1416: #endif
1417:   return(0);
1418: }

1422: /*@C
1423:  PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
1424:     called from PetscFinalize() automatically.

1426:  Level: developer

1428:  .keywords: Petsc, destroy, package
1429:  .seealso: PetscFinalize()
1430: @*/
1431: PetscErrorCode PCGAMGFinalizePackage(void)
1432: {

1436:   PCGAMGPackageInitialized = PETSC_FALSE;
1437:   PetscFunctionListDestroy(&GAMGList);
1438:   return(0);
1439: }

1443: /*@C
1444:  PCGAMGRegister - Register a PCGAMG implementation.

1446:  Input Parameters:
1447:  + type - string that will be used as the name of the GAMG type.
1448:  - create - function for creating the gamg context.

1450:   Level: advanced

1452:  .seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
1453: @*/
1454: PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
1455: {

1459:   PCGAMGInitializePackage();
1460:   PetscFunctionListAdd(&GAMGList,type,create);
1461:   return(0);
1462: }
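/* Editorial usage sketch (not part of the original source; 'PCCreateGAMG_MyAMG' is a hypothetical
   creation routine supplied by the caller):

     PetscErrorCode PCCreateGAMG_MyAMG(PC);
     PCGAMGRegister("myamg", PCCreateGAMG_MyAMG);

   after which '-pc_gamg_type myamg' selects the new implementation. */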