Actual source code: gamg.c

petsc-3.4.5 2014-06-29
  1: /*
  2:  GAMG geometric-algebraic multigrid PC - Mark Adams 2011
  3:  */
  4: #include "petsc-private/matimpl.h"
  5: #include <../src/ksp/pc/impls/gamg/gamg.h>           /*I "petscpc.h" I*/
  6: #include <petsc-private/kspimpl.h>
  7: #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

  9: #if defined PETSC_GAMG_USE_LOG
 10: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
 11: #endif

 13: #if defined PETSC_USE_LOG
 14: PetscLogEvent PC_GAMGGgraph_AGG;
 15: PetscLogEvent PC_GAMGGgraph_GEO;
 16: PetscLogEvent PC_GAMGCoarsen_AGG;
 17: PetscLogEvent PC_GAMGCoarsen_GEO;
 18: PetscLogEvent PC_GAMGProlongator_AGG;
 19: PetscLogEvent PC_GAMGProlongator_GEO;
 20: PetscLogEvent PC_GAMGOptprol_AGG;
 21: PetscLogEvent PC_GAMGKKTProl_AGG;
 22: #endif

 24: #define GAMG_MAXLEVELS 30

 26: /* #define GAMG_STAGES */
 27: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
 28: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
 29: #endif

 31: static PetscFunctionList GAMGList = 0;
 32: static PetscBool PCGAMGPackageInitialized;

 34: /* ----------------------------------------------------------------------------- */
 37: PetscErrorCode PCReset_GAMG(PC pc)
 38: {
 40:   PC_MG          *mg      = (PC_MG*)pc->data;
 41:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

 44:   if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
 45:     PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
 46:     PetscFree(pc_gamg->data);
 47:   }
 48:   pc_gamg->data = NULL; pc_gamg->data_sz = 0;

 50:   if (pc_gamg->orig_data) {
 51:     PetscFree(pc_gamg->orig_data);
 52:   }
 53:   return(0);
 54: }

 56: /* private 2x2 Mat Nest for Stokes */
 57: typedef struct {
 58:   Mat A11,A21,A12,Amat;
 59:   IS  prim_is,constr_is;
 60: } GAMGKKTMat;

 64: static PetscErrorCode GAMGKKTMatCreate(Mat A, PetscBool iskkt, GAMGKKTMat *out)
 65: {
 67:   out->Amat = A;
 68:   if (iskkt) {
 70:     IS             is_constraint, is_prime;
 71:     PetscInt       nmin,nmax;

 73:     MatGetOwnershipRange(A, &nmin, &nmax);
 74:     MatFindZeroDiagonals(A, &is_constraint);
 75:     ISComplement(is_constraint, nmin, nmax, &is_prime);

 77:     out->prim_is   = is_prime;
 78:     out->constr_is = is_constraint;

 80:     MatGetSubMatrix(A, is_prime, is_prime,      MAT_INITIAL_MATRIX, &out->A11);
 81:     MatGetSubMatrix(A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12);
 82:     MatGetSubMatrix(A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21);
 83:   } else {
 84:     out->A11       = A;
 85:     out->A21       = NULL;
 86:     out->A12       = NULL;
 87:     out->prim_is   = NULL;
 88:     out->constr_is = NULL;
 89:   }
 90:   return(0);
 91: }
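
/*
   Editor's note: a sketch (not code from this file) of the 2x2 block view that
   GAMGKKTMatCreate() extracts for a saddle-point system:

        Amat = [ A11  A12 ]    prim_is   = rows/cols with a nonzero diagonal
               [ A21   0  ]    constr_is = rows/cols found by MatFindZeroDiagonals()

   ISComplement() builds prim_is as the complement of constr_is over the local
   ownership range [nmin,nmax), and MatGetSubMatrix() pulls out the three
   nonzero blocks; the (2,2) block is zero for a standard KKT system.
*/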

 95: static PetscErrorCode GAMGKKTMatDestroy(GAMGKKTMat *mat)
 96: {

100:   if (mat->A11 && mat->A11 != mat->Amat) {
101:     MatDestroy(&mat->A11);
102:   }
103:   MatDestroy(&mat->A21);
104:   MatDestroy(&mat->A12);

106:   ISDestroy(&mat->prim_is);
107:   ISDestroy(&mat->constr_is);
108:   return(0);
109: }

111: /* -------------------------------------------------------------------------- */
112: /*
113:    createLevel: create the coarse operator with a Galerkin RAP product;
114:      repartition and/or reduce the number of active processors.

116:    Input Parameter:
117:    . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
118:           'pc_gamg->data_sz' are changed via repartitioning/reduction.
119:    . Amat_fine - matrix on this fine (k) level
120:    . cr_bs - coarse block size
121:    . isLast - flag indicating the last (coarsest) level; forces reduction to one process
122:    . stokes - flag indicating a Stokes/saddle-point (KKT) system
123:    In/Output Parameter:
124:    . a_P_inout - prolongation operator to the next level (k-->k-1)
125:    . a_nactive_proc - number of active procs
126:    Output Parameter:
127:    . a_Amat_crs - coarse matrix that is created (k-1)
128: */

132: static PetscErrorCode createLevel(const PC pc,const Mat Amat_fine,const PetscInt cr_bs,const PetscBool isLast,const PetscBool stokes,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
133: {
134:   PetscErrorCode  ierr;
135:   PC_MG           *mg         = (PC_MG*)pc->data;
136:   PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
137:   const PetscBool repart      = pc_gamg->repart;
138:   const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
139:   Mat             Cmat,Pold=*a_P_inout;
140:   MPI_Comm        comm;
141:   PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
142:   PetscInt        ncrs_eq,ncrs_prim,f_bs;

145:   PetscObjectGetComm((PetscObject)Amat_fine,&comm);
146:   MPI_Comm_rank(comm, &rank);
147:   MPI_Comm_size(comm, &size);
148:   MatGetBlockSize(Amat_fine, &f_bs);
149:   /* RAP */
150:   MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);

152:   /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations)*/
153:   ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
154:   if (pc_gamg->data_sz % (pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_sz %D not divisible by (pc_gamg->data_cell_cols %D *pc_gamg->data_cell_rows %D)",pc_gamg->data_sz,pc_gamg->data_cell_cols,pc_gamg->data_cell_rows);
155:   MatGetLocalSize(Cmat, &ncrs_eq, NULL);

 157:   /* choose the number of PEs to make active ('new_size'); can be any integer in [1,P] */
158:   {
159:     PetscInt ncrs_eq_glob;
160:     MatGetSize(Cmat, &ncrs_eq_glob, NULL);
161:     new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
162:     if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1;
163:     else if (new_size >= nactive) new_size = nactive; /* no change, rare */
164:     if (isLast) new_size = 1;
165:   }

167:   if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
168:   else {
169:     const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
170:     PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
171:     IS             is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
172:     VecScatter     vecscat;
173:     PetscScalar    *array;
174:     Vec            src_crd, dest_crd;

176:     nloc_old = ncrs_eq/cr_bs;
177:     if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
178: #if defined PETSC_GAMG_USE_LOG
179:     PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
180: #endif
181:     /* make 'is_eq_newproc' */
182:     PetscMalloc(size*sizeof(PetscInt), &counts);
183:     if (repart && !stokes) {
 184:       /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
185:       Mat adj;

187:       if (pc_gamg->verbose>0) {
188:         if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
189:         else {
190:           PetscInt n;
191:           MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);
192:           PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
193:         }
194:       }

196:       /* get 'adj' */
197:       if (cr_bs == 1) {
198:         MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);
199:       } else {
200:         /* make a scalar matrix to partition (no Stokes here) */
201:         Mat               tMat;
202:         PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
203:         const PetscScalar *vals;
204:         const PetscInt    *idx;
205:         PetscInt          *d_nnz, *o_nnz, M, N;
206:         static PetscInt   llev = 0;

208:         PetscMalloc(ncrs_prim*sizeof(PetscInt), &d_nnz);
209:         PetscMalloc(ncrs_prim*sizeof(PetscInt), &o_nnz);
210:         MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);
211:         MatGetSize(Cmat, &M, &N);
212:         for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
213:           MatGetRow(Cmat,Ii,&ncols,0,0);
214:           d_nnz[jj] = ncols/cr_bs;
215:           o_nnz[jj] = ncols/cr_bs;
216:           MatRestoreRow(Cmat,Ii,&ncols,0,0);
217:           if (d_nnz[jj] > ncrs_prim) d_nnz[jj] = ncrs_prim;
218:           if (o_nnz[jj] > (M/cr_bs-ncrs_prim)) o_nnz[jj] = M/cr_bs-ncrs_prim;
219:         }

221:         MatCreate(comm, &tMat);
222:         MatSetSizes(tMat, ncrs_prim, ncrs_prim,PETSC_DETERMINE, PETSC_DETERMINE);
223:         MatSetType(tMat,MATAIJ);
224:         MatSeqAIJSetPreallocation(tMat,0,d_nnz);
225:         MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
226:         PetscFree(d_nnz);
227:         PetscFree(o_nnz);

229:         for (ii = Istart_crs; ii < Iend_crs; ii++) {
230:           PetscInt dest_row = ii/cr_bs;
231:           MatGetRow(Cmat,ii,&ncols,&idx,&vals);
232:           for (jj = 0; jj < ncols; jj++) {
233:             PetscInt    dest_col = idx[jj]/cr_bs;
234:             PetscScalar v        = 1.0;
235:             MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
236:           }
237:           MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
238:         }
239:         MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
240:         MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);
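        /*
           Editor's note: a worked example of the block-to-scalar collapse above,
           assuming cr_bs = 2: row ii = 5 with columns {4,5,8} maps to
           dest_row = 2 and dest_cols = {2,2,4}; the duplicate column is merged
           by ADD_VALUES.  Only connectivity matters for partitioning, which is
           why every entry is set to v = 1.0.
        */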

242:         if (llev++ == -1) {
243:           PetscViewer viewer; char fname[32];
244:           PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);
245:           PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
246:           MatView(tMat, viewer);
247:           PetscViewerDestroy(&viewer);
248:         }

250:         MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);

252:         MatDestroy(&tMat);
253:       } /* create 'adj' */

255:       { /* partition: get newproc_idx */
256:         char            prefix[256];
257:         const char      *pcpre;
258:         const PetscInt  *is_idx;
259:         MatPartitioning mpart;
260:         IS              proc_is;
261:         PetscInt        targetPE;

263:         MatPartitioningCreate(comm, &mpart);
264:         MatPartitioningSetAdjacency(mpart, adj);
265:         PCGetOptionsPrefix(pc, &pcpre);
266:         PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");
267:         PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
268:         MatPartitioningSetFromOptions(mpart);
269:         MatPartitioningSetNParts(mpart, new_size);
270:         MatPartitioningApply(mpart, &proc_is);
271:         MatPartitioningDestroy(&mpart);

273:         /* collect IS info */
274:         PetscMalloc(ncrs_eq*sizeof(PetscInt), &newproc_idx);
275:         ISGetIndices(proc_is, &is_idx);
276:         targetPE = 1; /* bring to "front" of machine */
277:         /*targetPE = size/new_size;*/ /* spread partitioning across machine */
278:         for (kk = jj = 0 ; kk < nloc_old ; kk++) {
279:           for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
280:             newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
281:           }
282:         }
283:         ISRestoreIndices(proc_is, &is_idx);
284:         ISDestroy(&proc_is);
285:       }
286:       MatDestroy(&adj);

288:       ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);
289:       if (newproc_idx != 0) {
290:         PetscFree(newproc_idx);
291:       }
 292:     } else { /* simple aggregation of parts -- 'is_eq_newproc' */

294:       PetscInt rfactor,targetPE;
295:       /* find factor */
296:       if (new_size == 1) rfactor = size; /* easy */
297:       else {
298:         PetscReal best_fact = 0.;
299:         jj = -1;
300:         for (kk = 1 ; kk <= size ; kk++) {
301:           if (size%kk==0) { /* a candidate */
302:             PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
303:             if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
304:             if (fact > best_fact) {
305:               best_fact = fact; jj = kk;
306:             }
307:           }
308:         }
309:         if (jj != -1) rfactor = jj;
310:         else rfactor = 1; /* can happen when 'size' is prime -- no nontrivial factor */
311:       }
312:       new_size = size/rfactor;
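      /*
         Editor's note: a worked example of the factor search above.  With
         size = 16 and new_size = 5, the divisors kk of 16 give active counts
         nactpe = 16/kk = {16,8,4,2,1} and ratios fact = {0.31,0.63,0.80,0.40,0.20}
         against the target; kk = 4 wins, so rfactor = 4 and new_size = 16/4 = 4.
      */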

314:       if (new_size==nactive) {
315:         *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
316:         PetscFree(counts);
317:         if (pc_gamg->verbose>0) {
318:           PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
319:         }
320:         return(0);
321:       }

323:       if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
324:       targetPE = rank/rfactor;
325:       ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);

327:       if (stokes) {
328:         ISCreateStride(comm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim);
329:       }
330:     } /* end simple 'is_eq_newproc' */

332:     /*
333:      Create an index set from the is_eq_newproc index set to indicate the mapping TO
334:      */
335:     ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);
336:     if (stokes) {
337:       ISPartitioningToNumbering(is_eq_newproc_prim, &is_eq_num_prim);
338:     } else is_eq_num_prim = is_eq_num;
339:     /*
340:       Determine how many equations/vertices are assigned to each processor
341:      */
342:     ISPartitioningCount(is_eq_newproc, size, counts);
343:     ncrs_eq_new = counts[rank];
344:     ISDestroy(&is_eq_newproc);
345:     if (stokes) {
346:       ISPartitioningCount(is_eq_newproc_prim, size, counts);
347:       ISDestroy(&is_eq_newproc_prim);
348:       ncrs_prim_new = counts[rank]/cr_bs; /* nodes */
349:     } else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */

351:     PetscFree(counts);
352: #if defined PETSC_GAMG_USE_LOG
353:     PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
354: #endif

356:     /* move data (for primal equations only) */
357:     /* Create a vector to contain the newly ordered element information */
358:     VecCreate(comm, &dest_crd);
359:     VecSetSizes(dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE);
360:     VecSetFromOptions(dest_crd); /* this is needed! */
361:     /*
362:      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
363:      a block size of 'node_data_sz').  Note, ISs are expanded into equation space by 'cr_bs'.
364:      */
365:     PetscMalloc((ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx);
366:     ISGetIndices(is_eq_num_prim, &idx);
367:     for (ii=0,jj=0; ii<ncrs_prim; ii++) {
368:       PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
369:       for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
370:     }
371:     ISRestoreIndices(is_eq_num_prim, &idx);
372:     ISCreateGeneral(comm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat);
373:     PetscFree(tidx);
374:     /*
375:      Create a vector to contain the original vertex information for each element
376:      */
377:     VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd);
378:     for (jj=0; jj<ndata_cols; jj++) {
379:       const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
380:       for (ii=0; ii<ncrs_prim; ii++) {
381:         for (kk=0; kk<ndata_rows; kk++) {
382:           PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
383:           PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
384:           VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);
385:         }
386:       }
387:     }
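    /*
       Editor's note: an example of the two layouts used above, assuming
       ndata_rows = 1 and ndata_cols = 3 (e.g., x,y,z coordinates, so
       node_data_sz = 3 and stride0 = ncrs_prim).  pc_gamg->data is stored
       one data column at a time (index ix):
           data    = [x_0..x_{n-1}, y_0..y_{n-1}, z_0..z_{n-1}]
       while src_crd interleaves the items per node (index jx):
           src_crd = [x_0,y_0,z_0, x_1,y_1,z_1, ...]
    */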
388:     VecAssemblyBegin(src_crd);
389:     VecAssemblyEnd(src_crd);
390:     /*
391:       Scatter the element vertex information (still in the original vertex ordering)
392:       to the correct processor
393:     */
394:     VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);
395:     ISDestroy(&isscat);
396:     VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
397:     VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
398:     VecScatterDestroy(&vecscat);
399:     VecDestroy(&src_crd);
400:     /*
401:       Put the element vertex data into a new allocation of the gdata->ele
402:     */
403:     PetscFree(pc_gamg->data);
404:     PetscMalloc(node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data);

406:     pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
407:     strideNew        = ncrs_prim_new*ndata_rows;

409:     VecGetArray(dest_crd, &array);
410:     for (jj=0; jj<ndata_cols; jj++) {
411:       for (ii=0; ii<ncrs_prim_new; ii++) {
412:         for (kk=0; kk<ndata_rows; kk++) {
413:           PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
414:           pc_gamg->data[ix] = PetscRealPart(array[jx]);
415:         }
416:       }
417:     }
418:     VecRestoreArray(dest_crd, &array);
419:     VecDestroy(&dest_crd);

421:     /* move A and P (columns) with new layout */
422: #if defined PETSC_GAMG_USE_LOG
423:     PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
424: #endif

426:     /*
427:       Invert for MatGetSubMatrix
428:     */
429:     ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);
430:     ISSort(new_eq_indices); /* is this needed? */
431:     ISSetBlockSize(new_eq_indices, cr_bs);
432:     if (is_eq_num != is_eq_num_prim) {
433:       ISDestroy(&is_eq_num_prim); /* could be same as 'is_eq_num' */
434:     }
435:     ISDestroy(&is_eq_num);
436: #if defined PETSC_GAMG_USE_LOG
437:     PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
438:     PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
439: #endif
440:     /* 'a_Amat_crs' output */
441:     {
442:       Mat mat;
443:       MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);
444:       *a_Amat_crs = mat;

446:       if (!PETSC_TRUE) {
447:         PetscInt cbs, rbs;
448:         MatGetBlockSizes(Cmat, &rbs, &cbs);
449:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
450:         MatGetBlockSizes(mat, &rbs, &cbs);
451:         PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);
452:       }
453:     }
454:     MatDestroy(&Cmat);

456: #if defined PETSC_GAMG_USE_LOG
457:     PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
458: #endif
459:     /* prolongator */
460:     {
461:       IS       findices;
462:       PetscInt Istart,Iend;
463:       Mat      Pnew;
464:       MatGetOwnershipRange(Pold, &Istart, &Iend);
465: #if defined PETSC_GAMG_USE_LOG
466:       PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
467: #endif
468:       ISCreateStride(comm,Iend-Istart,Istart,1,&findices);
469:       ISSetBlockSize(findices,f_bs);
470:       MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);
471:       ISDestroy(&findices);

473:       if (!PETSC_TRUE) {
474:         PetscInt cbs, rbs;
475:         MatGetBlockSizes(Pold, &rbs, &cbs);
476:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
477:         MatGetBlockSizes(Pnew, &rbs, &cbs);
478:         PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);
479:       }
480: #if defined PETSC_GAMG_USE_LOG
481:       PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
482: #endif
483:       MatDestroy(a_P_inout);

485:       /* output - repartitioned */
486:       *a_P_inout = Pnew;
487:     }
488:     ISDestroy(&new_eq_indices);

490:     *a_nactive_proc = new_size; /* output */
491:   }

493:   /* output matrix data */
494:   if (!PETSC_TRUE) {
495:     PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
496:     if (llev==0) {
497:       sprintf(fname,"Cmat_%d.m",llev++);
498:       PetscViewerASCIIOpen(comm,fname,&viewer);
499:       PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
500:       MatView(Amat_fine, viewer);
501:       PetscViewerDestroy(&viewer);
502:     }
503:     sprintf(fname,"Cmat_%d.m",llev++);
504:     PetscViewerASCIIOpen(comm,fname,&viewer);
505:     PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);
506:     MatView(Cmat, viewer);
507:     PetscViewerDestroy(&viewer);
508:   }
509:   return(0);
510: }
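
/*
   Editor's note: the heart of createLevel() is the Galerkin triple product
   A_{k-1} = P^T A_k P computed by MatPtAP().  A minimal standalone sketch of
   just that step, with hypothetical handles Afine and P (the fill estimate
   2.0 matches the call above):

       Mat Acoarse;
       MatPtAP(Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse);
       ... use Acoarse as the next-level operator ...
       MatDestroy(&Acoarse);

   When the nonzero structure is unchanged on later setups, MAT_REUSE_MATRIX
   can replace MAT_INITIAL_MATRIX to update the product in place, as
   PCSetUp_GAMG() does below when reusing the prolongation.
*/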

512: /* -------------------------------------------------------------------------- */
513: /*
514:    PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
515:                     by setting data structures and options.

517:    Input Parameter:
518: .  pc - the preconditioner context

520:    Application Interface Routine: PCSetUp()

522:    Notes:
523:    The interface routine PCSetUp() is not usually called directly by
524:    the user, but instead is called by PCApply() if necessary.
525: */
528: PetscErrorCode PCSetUp_GAMG(PC pc)
529: {
531:   PC_MG          *mg      = (PC_MG*)pc->data;
532:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
533:   Mat            Pmat     = pc->pmat;
534:   PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
535:   MPI_Comm       comm;
536:   PetscMPIInt    rank,size,nactivepe;
537:   Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
538:   PetscReal      emaxs[GAMG_MAXLEVELS];
539:   IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
540:   GAMGKKTMat     kktMatsArr[GAMG_MAXLEVELS];
541:   PetscLogDouble nnz0=0.,nnztot=0.;
542:   MatInfo        info;
543:   PetscBool      stokes = PETSC_FALSE, redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol);

546:   PetscObjectGetComm((PetscObject)pc,&comm);
547:   MPI_Comm_rank(comm,&rank);
548:   MPI_Comm_size(comm,&size);

550:   if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);

552:   if (pc_gamg->setup_count++ > 0) {
553:     if (redo_mesh_setup) {
554:       /* reset everything */
555:       PCReset_MG(pc);
556:       pc->setupcalled = 0;
557:     } else {
558:       PC_MG_Levels **mglevels = mg->levels;
559:       /* just do Galerkin grids */
560:       Mat          B,dA,dB;

562:       if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
563:       if (pc_gamg->Nlevels > 1) {
564:         /* currently only handle case where mat and pmat are the same on coarser levels */
565:         KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,NULL);
566:         /* (re)set to get dirty flag */
567:         KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);

569:         for (level=pc_gamg->Nlevels-2; level>=0; level--) {
570:           /* the first time through, the matrix structure has changed from repartitioning */
571:           if (pc_gamg->setup_count==2) {
572:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
573:             MatDestroy(&mglevels[level]->A);

575:             mglevels[level]->A = B;
576:           } else {
577:             KSPGetOperators(mglevels[level]->smoothd,NULL,&B,NULL);
578:             MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
579:           }
580:           KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);
581:           dB   = B;
582:         }
583:       }

585:       PCSetUp_MG(pc);

587:       /* PCSetUp_MG seems to insist on setting this to GMRES */
588:       KSPSetType(mglevels[0]->smoothd, KSPPREONLY);
589:       return(0);
590:     }
591:   }

593:   PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,NULL);

595:   GAMGKKTMatCreate(Pmat, stokes, &kktMatsArr[0]);

597:   if (!pc_gamg->data) {
598:     if (pc_gamg->orig_data) {
599:       MatGetBlockSize(Pmat, &bs);
600:       MatGetLocalSize(Pmat, &qq, NULL);

602:       pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
603:       pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
604:       pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

606:       PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->data);
607:       for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
608:     } else {
609:       if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
610:       if (stokes) SETERRQ(comm,PETSC_ERR_PLIB,"Need data (eg, PCSetCoordinates) for Stokes problems");
611:       pc_gamg->ops->createdefaultdata(pc, kktMatsArr[0].A11);
612:     }
613:   }

615:   /* cache original data for reuse */
616:   if (!pc_gamg->orig_data && redo_mesh_setup) {
617:     PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->orig_data);
618:     for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
619:     pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
620:     pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
621:   }

623:   /* get basic dims */
624:   if (stokes) bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */
625:   else {
626:     MatGetBlockSize(Pmat, &bs);
627:   }

629:   MatGetSize(Pmat, &M, &qq);
630:   if (pc_gamg->verbose) {
631:     PetscInt NN = M;
632:     if (pc_gamg->verbose==1) {
633:       MatGetInfo(Pmat,MAT_LOCAL,&info);
634:       MatGetLocalSize(Pmat, &NN, &qq);
635:     } else {
636:       MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
637:     }
638:     nnz0   = info.nz_used;
639:     nnztot = info.nz_used;
640:     PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
641:                          rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
642:                          (int)(nnz0/(PetscReal)NN),size);
643:   }

645:   /* Get A_i and R_i */
646:   for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
647:        level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit);  /* && (size==1 || nactivepe>1); */
648:        level++) {
649:     level1 = level + 1;
650: #if defined PETSC_GAMG_USE_LOG
651:     PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
652: #if (defined GAMG_STAGES)
653:     PetscLogStagePush(gamg_stages[level]);
654: #endif
655: #endif
656:     /* deal with Stokes, get sub matrices */
657:     if (level > 0) {
658:       GAMGKKTMatCreate(Aarr[level], stokes, &kktMatsArr[level]);
659:     }
660:     { /* construct prolongator */
661:       Mat              Gmat;
662:       PetscCoarsenData *agg_lists;
663:       Mat              Prol11,Prol22;

665:       pc_gamg->ops->graph(pc,kktMatsArr[level].A11, &Gmat);
666:       pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);
667:       pc_gamg->ops->prolongator(pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11);

669:       /* could have failed to create new level */
670:       if (Prol11) {
671:         /* get new block size of coarse matrices */
672:         MatGetBlockSizes(Prol11, NULL, &bs);

674:         if (stokes) {
675:           if (!pc_gamg->ops->formkktprol) SETERRQ(comm,PETSC_ERR_USER,"Stokes not supported by AMG method.");
676:           /* R A12 == (T = A21 P)';  G = T' T; coarsen G; form plain agg with G */
677:           pc_gamg->ops->formkktprol(pc, Prol11, kktMatsArr[level].A21, &Prol22);
678:         }

680:         if (pc_gamg->ops->optprol) {
681:           /* smooth */
682:           pc_gamg->ops->optprol(pc, kktMatsArr[level].A11, &Prol11);
683:         }

685:         if (stokes) {
686:           IS  is_row[2];
687:           Mat a[4];

689:           is_row[0] = kktMatsArr[level].prim_is; is_row[1] = kktMatsArr[level].constr_is;
690:           a[0]      = Prol11; a[1] = NULL; a[2] = NULL; a[3] = Prol22;
691:           MatCreateNest(comm,2,is_row, 2, is_row, a, &Parr[level1]);
692:         } else Parr[level1] = Prol11;
693:       } else Parr[level1] = NULL;

695:       if (pc_gamg->use_aggs_in_gasm) {
696:         PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);
697:       }

699:       MatDestroy(&Gmat);
700:       PetscCDDestroy(agg_lists);
701:     } /* construct prolongator scope */
702: #if defined PETSC_GAMG_USE_LOG
703:     PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
704: #endif
705:     /* cache eigen estimate */
706:     if (pc_gamg->emax_id != -1) {
707:       PetscBool flag;
708:       PetscObjectComposedDataGetReal((PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag);
709:       if (!flag) emaxs[level] = -1.;
710:     } else emaxs[level] = -1.;
711:     if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
712:     if (!Parr[level1]) {
713:       if (pc_gamg->verbose) {
714:          PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);
715:       }
716:       break;
717:     }
718: #if defined PETSC_GAMG_USE_LOG
719:     PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
720: #endif

722:     createLevel(pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
723:                        stokes, &Parr[level1], &Aarr[level1], &nactivepe);

725: #if defined PETSC_GAMG_USE_LOG
726:     PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
727: #endif
728:     MatGetSize(Aarr[level1], &M, &qq);

730:     if (pc_gamg->verbose > 0) {
731:       PetscInt NN = M;
732:       if (pc_gamg->verbose==1) {
733:         MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
734:         MatGetLocalSize(Aarr[level1], &NN, &qq);
735:       } else {
736:         MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);
737:       }

739:       nnztot += info.nz_used;
740:       PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
741:                             rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
742:                             (int)(info.nz_used/(PetscReal)NN), nactivepe);
743:     }

745:     /* stop if one node -- could pull back for singular problems */
746:     if (M/pc_gamg->data_cell_cols < 2) {
747:       level++;
748:       break;
749:     }
750: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
751:     PetscLogStagePop();
752: #endif
753:   } /* levels */

755:   if (pc_gamg->data) {
756:     PetscFree(pc_gamg->data);
757:     pc_gamg->data = NULL;
758:   }

760:   if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
761:   pc_gamg->Nlevels = level + 1;
762:   fine_level       = level;
763:   PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);

765:   /* simple setup */
766:   if (!PETSC_TRUE) {
767:     PC_MG_Levels **mglevels = mg->levels;
768:     for (lidx=0,level=pc_gamg->Nlevels-1;
769:          lidx<fine_level;
770:          lidx++, level--) {
771:       PCMGSetInterpolation(pc, lidx+1, Parr[level]);
772:       KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);
773:       MatDestroy(&Parr[level]);
774:       MatDestroy(&Aarr[level]);
775:     }
776:     KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);

778:     PCSetUp_MG(pc);
779:   } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
780:     /* set default smoothers & set operators */
781:     for (lidx = 1, level = pc_gamg->Nlevels-2;
782:          lidx <= fine_level;
783:          lidx++, level--) {
784:       KSP smoother;
785:       PC  subpc;

787:       PCMGGetSmoother(pc, lidx, &smoother);
788:       KSPGetPC(smoother, &subpc);

790:       KSPSetNormType(smoother, KSP_NORM_NONE);
791:       /* set ops */
792:       KSPSetOperators(smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);
793:       PCMGSetInterpolation(pc, lidx, Parr[level+1]);

795:       /* create field split PC, get subsmoother */
796:       if (stokes) {
797:         KSP      *ksps;
798:         PetscInt nn;
799:         PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is);
800:         PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is);
801:         PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
802:         smoother = ksps[0];
803:         KSPGetPC(smoother, &subpc);
804:         PetscFree(ksps);
805:       }
806:       GAMGKKTMatDestroy(&kktMatsArr[level]);

808:       /* set defaults */
809:       KSPSetType(smoother, KSPCHEBYSHEV);

811:       /* override defaults and command line args (!) */
812:       if (pc_gamg->use_aggs_in_gasm) {
813:         PetscInt sz;
814:         IS       *is;

816:         sz   = nASMBlocksArr[level];
817:         is   = ASMLocalIDsArr[level];
818:         PCSetType(subpc, PCGASM);
819:         if (sz==0) {
820:           IS       is;
821:           PetscInt my0,kk;
822:           MatGetOwnershipRange(Aarr[level], &my0, &kk);
823:           ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);
824:           PCGASMSetSubdomains(subpc, 1, &is, NULL);
825:           ISDestroy(&is);
826:         } else {
827:           PetscInt kk;
828:           PCGASMSetSubdomains(subpc, sz, is, NULL);
829:           for (kk=0; kk<sz; kk++) {
830:             ISDestroy(&is[kk]);
831:           }
832:           PetscFree(is);
833:         }
834:         PCGASMSetOverlap(subpc, 0);

836:         ASMLocalIDsArr[level] = NULL;
837:         nASMBlocksArr[level]  = 0;
838:         PCGASMSetType(subpc, PC_GASM_BASIC);
839:       } else {
840:         PCSetType(subpc, PCJACOBI);
841:       }
842:     }
843:     {
844:       /* coarse grid */
845:       KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
846:       Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
847:       PCMGGetSmoother(pc, lidx, &smoother);
848:       KSPSetOperators(smoother, Lmat, Lmat, SAME_NONZERO_PATTERN);
849:       KSPSetNormType(smoother, KSP_NORM_NONE);
850:       KSPGetPC(smoother, &subpc);
851:       PCSetType(subpc, PCBJACOBI);
852:       PCSetUp(subpc);
853:       PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);
854:       if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
855:       KSPGetPC(k2[0],&pc2);
856:       PCSetType(pc2, PCLU);
857:       PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);
858:       KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);
859:       /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
860:        * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
861:        * view every subdomain as though they were different. */
862:       ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
863:     }
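      /*
         Editor's note: the hand-wired coarse solve above is roughly what the
         options -mg_coarse_pc_type bjacobi -mg_coarse_sub_pc_type lu
         -mg_coarse_sub_ksp_max_it 1 would request; GAMG sets it in code so
         the default holds without any command-line arguments.
      */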

865:     /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
866:     PetscObjectOptionsBegin((PetscObject)pc);
867:     PCSetFromOptions_MG(pc);
868:     PetscOptionsEnd();
869:     if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");

871:     /* create cheby smoothers */
872:     for (lidx = 1, level = pc_gamg->Nlevels-2;
873:          lidx <= fine_level;
874:          lidx++, level--) {
875:       KSP       smoother;
876:       PetscBool flag;
877:       PC        subpc;

879:       PCMGGetSmoother(pc, lidx, &smoother);
880:       KSPGetPC(smoother, &subpc);

882:       /* create field split PC, get subsmoother */
883:       if (stokes) {
884:         KSP      *ksps;
885:         PetscInt nn;
886:         PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
887:         smoother = ksps[0];
888:         KSPGetPC(smoother, &subpc);
889:         PetscFree(ksps);
890:       }

892:       /* do my own cheby */
893:       PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);
894:       if (flag) {
895:         PetscReal emax, emin;
896:         PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);
897:         if (flag && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
898:         else { /* eigen estimate 'emax' */
899:           KSP eksp;
900:           Mat Lmat = Aarr[level];
901:           Vec bb, xx;

903:           MatGetVecs(Lmat, &bb, 0);
904:           MatGetVecs(Lmat, &xx, 0);
905:           {
906:             PetscRandom rctx;
907:             PetscRandomCreate(comm,&rctx);
908:             PetscRandomSetFromOptions(rctx);
909:             VecSetRandom(bb,rctx);
910:             PetscRandomDestroy(&rctx);
911:           }

913:           /* zeroing out BC rows -- needed for crazy matrices */
914:           {
915:             PetscInt    Istart,Iend,ncols,jj,Ii;
916:             PetscScalar zero = 0.0;
917:             MatGetOwnershipRange(Lmat, &Istart, &Iend);
918:             for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
919:               MatGetRow(Lmat,Ii,&ncols,0,0);
920:               if (ncols <= 1) {
921:                 VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);
922:               }
923:               MatRestoreRow(Lmat,Ii,&ncols,0,0);
924:             }
925:             VecAssemblyBegin(bb);
926:             VecAssemblyEnd(bb);
927:           }

929:           KSPCreate(comm, &eksp);
930:           KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);
931:           KSPSetNormType(eksp, KSP_NORM_NONE);
932:           KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
933:           KSPAppendOptionsPrefix(eksp, "gamg_est_");
934:           KSPSetFromOptions(eksp);

936:           KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);
937:           KSPSetOperators(eksp, Lmat, Lmat, SAME_NONZERO_PATTERN);
938:           KSPSetComputeSingularValues(eksp,PETSC_TRUE);

940:           /* set PC type to be same as smoother */
941:           KSPSetPC(eksp, subpc);

943:           /* solve - keep stuff out of logging */
944:           PetscLogEventDeactivate(KSP_Solve);
945:           PetscLogEventDeactivate(PC_Apply);
946:           KSPSolve(eksp, bb, xx);
947:           PetscLogEventActivate(KSP_Solve);
948:           PetscLogEventActivate(PC_Apply);

950:           KSPComputeExtremeSingularValues(eksp, &emax, &emin);

952:           VecDestroy(&xx);
953:           VecDestroy(&bb);
954:           KSPDestroy(&eksp);

956:           if (pc_gamg->verbose > 0) {
957:             PetscInt N1, tt;
958:             MatGetSize(Aarr[level], &N1, &tt);
959:             PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
960:           }
961:         }
962:         {
963:           PetscInt N1, N0;
964:           MatGetSize(Aarr[level], &N1, NULL);
965:           MatGetSize(Aarr[level+1], &N0, NULL);
966:           /* heuristic - is this crap? */
967:           /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
968:           emin  = emax * pc_gamg->eigtarget[0];
969:           emax *= pc_gamg->eigtarget[1];
970:         }
971:         KSPChebyshevSetEigenvalues(smoother, emax, emin);
972:       } /* setup cheby flag */
973:     } /* non-coarse levels */
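    /*
       Editor's note: the eigen-estimate above follows a standard PETSc pattern;
       a condensed sketch with hypothetical handles (PETSc 3.4 signatures):

           KSP       eksp;
           PetscReal emax, emin;
           KSPCreate(comm, &eksp);
           KSPSetOperators(eksp, Lmat, Lmat, SAME_NONZERO_PATTERN);
           KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);
           KSPSetComputeSingularValues(eksp, PETSC_TRUE);
           KSPSolve(eksp, bb, xx);                    (random right-hand side bb)
           KSPComputeExtremeSingularValues(eksp, &emax, &emin);
           KSPChebyshevSetEigenvalues(smoother, 1.05*emax, 0.05*emax);

       The factors 1.05 and 0.05 are the default pc_gamg->eigtarget values set
       in PCCreate_GAMG() below.
    */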

975:     /* clean up */
976:     for (level=1; level<pc_gamg->Nlevels; level++) {
977:       MatDestroy(&Parr[level]);
978:       MatDestroy(&Aarr[level]);
979:     }

981:     PCSetUp_MG(pc);

983:     if (PETSC_TRUE) {
984:       KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
985:       PCMGGetSmoother(pc, 0, &smoother);
986:       KSPSetType(smoother, KSPPREONLY);
987:     }
988:   } else {
989:     KSP smoother;
990:     if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__);
991:     PCMGGetSmoother(pc, 0, &smoother);
992:     KSPSetOperators(smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);
993:     KSPSetType(smoother, KSPPREONLY);
994:     PCSetUp_MG(pc);
995:   }
996:   return(0);
997: }

999: /* ------------------------------------------------------------------------- */
1000: /*
1001:  PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
1002:    that was created with PCCreate_GAMG().

1004:    Input Parameter:
1005: .  pc - the preconditioner context

1007:    Application Interface Routine: PCDestroy()
1008: */
1011: PetscErrorCode PCDestroy_GAMG(PC pc)
1012: {
1014:   PC_MG          *mg     = (PC_MG*)pc->data;
1015:   PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

1018:   PCReset_GAMG(pc);
1019:   if (pc_gamg->ops->destroy) {
1020:     (*pc_gamg->ops->destroy)(pc);
1021:   }
1022:   PetscFree(pc_gamg->ops);
1023:   PetscFree(pc_gamg->gamg_type_name);
1024:   PetscFree(pc_gamg);
1025:   PCDestroy_MG(pc);
1026:   return(0);
1027: }


1032: /*@
1033:    PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
1034:    processor reduction.

1036:    Not Collective on PC

1038:    Input Parameters:
1039: +  pc - the preconditioner context
1040: -  n - the target number of equations per process on coarse grids

1042:    Options Database Key:
1043: .  -pc_gamg_process_eq_limit

1045:    Level: intermediate

1047:    Concepts: Unstructured multigrid preconditioner

1049: .seealso: ()
1050: @*/
1051: PetscErrorCode  PCGAMGSetProcEqLim(PC pc, PetscInt n)
1052: {

1057:   PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
1058:   return(0);
1059: }

1063: static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
1064: {
1065:   PC_MG   *mg      = (PC_MG*)pc->data;
1066:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1069:   if (n>0) pc_gamg->min_eq_proc = n;
1070:   return(0);
1071: }
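
/*
   Editor's note: a usage sketch for the coarsening-size controls, with a
   hypothetical pc handle (equivalent to -pc_gamg_process_eq_limit 100
   -pc_gamg_coarse_eq_limit 1000):

       PCGAMGSetProcEqLim(pc, 100);
       PCGAMGSetCoarseEqLim(pc, 1000);

   Values n <= 0 are ignored by these setters, leaving the defaults from
   PCCreate_GAMG() (50 equations per process, 800 coarse-grid equations).
*/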

1075: /*@
1076:    PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

1078:  Collective on PC

1080:    Input Parameters:
1081: +  pc - the preconditioner context
1082: -  n - the maximum number of equations on the coarsest grid

1084:    Options Database Key:
1085: .  -pc_gamg_coarse_eq_limit

1087:    Level: intermediate

1089:    Concepts: Unstructured multigrid preconditioner

1091: .seealso: ()
1092:  @*/
1093: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
1094: {

1099:   PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
1100:   return(0);
1101: }

1105: static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
1106: {
1107:   PC_MG   *mg      = (PC_MG*)pc->data;
1108:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1111:   if (n>0) pc_gamg->coarse_eq_limit = n;
1112:   return(0);
1113: }

1117: /*@
1118:    PCGAMGSetRepartitioning - Repartition the coarse grids

1120:    Collective on PC

1122:    Input Parameters:
1123: +  pc - the preconditioner context
1124: -  n - PETSC_TRUE to repartition the coarse grids

1126:    Options Database Key:
1127: .  -pc_gamg_repartition

1129:    Level: intermediate

1131:    Concepts: Unstructured multigrid preconditioner

1133: .seealso: ()
1134: @*/
1135: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
1136: {

1141:   PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
1142:   return(0);
1143: }

1147: static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1148: {
1149:   PC_MG   *mg      = (PC_MG*)pc->data;
1150:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1153:   pc_gamg->repart = n;
1154:   return(0);
1155: }

1159: /*@
1160:    PCGAMGSetReuseProl - Reuse the prolongation operator across solves

1162:    Collective on PC

1164:    Input Parameters:
1165: +  pc - the preconditioner context
1166: -  n - PETSC_TRUE to reuse the prolongation operator when the matrix values change

1168:    Options Database Key:
1169: .  -pc_gamg_reuse_interpolation

1171:    Level: intermediate

1173:    Concepts: Unstructured multigrid preconditioner

1175: .seealso: ()
1176: @*/
1177: PetscErrorCode PCGAMGSetReuseProl(PC pc, PetscBool n)
1178: {

1183:   PetscTryMethod(pc,"PCGAMGSetReuseProl_C",(PC,PetscBool),(pc,n));
1184:   return(0);
1185: }

1189: static PetscErrorCode PCGAMGSetReuseProl_GAMG(PC pc, PetscBool n)
1190: {
1191:   PC_MG   *mg      = (PC_MG*)pc->data;
1192:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1195:   pc_gamg->reuse_prol = n;
1196:   return(0);
1197: }

1201: /*@
1202:    PCGAMGSetUseASMAggs - Use the aggregation aggregates to define the GASM smoother's subdomains

1204:    Collective on PC

1206:    Input Parameters:
1207: +  pc - the preconditioner context
1208: -  n - PETSC_TRUE to use the aggregates to define the GASM subdomains

1210:    Options Database Key:
1211: .  -pc_gamg_use_agg_gasm

1213:    Level: intermediate

1215:    Concepts: Unstructured multigrid preconditioner

1217: .seealso: ()
1218: @*/
1219: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1220: {

1225:   PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1226:   return(0);
1227: }

1231: static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1232: {
1233:   PC_MG   *mg      = (PC_MG*)pc->data;
1234:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1237:   pc_gamg->use_aggs_in_gasm = n;
1238:   return(0);
1239: }

1243: /*@
1244:    PCGAMGSetNlevels - Set the maximum number of multigrid levels

1246:    Not collective on PC

1248:    Input Parameters:
1249: +  pc - the preconditioner context
1250: -  n - the maximum number of levels

1252:    Options Database Key:
1253: .  -pc_mg_levels

1255:    Level: intermediate

1257:    Concepts: Unstructured multigrid preconditioner

1259: .seealso: ()
1260: @*/
1261: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1262: {

1267:   PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1268:   return(0);
1269: }

1273: static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1274: {
1275:   PC_MG   *mg      = (PC_MG*)pc->data;
1276:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1279:   pc_gamg->Nlevels = n;
1280:   return(0);
1281: }

1285: /*@
1286:    PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

1288:    Not collective on PC

1290:    Input Parameters:
1291: +  pc - the preconditioner context
1292: -  n - the threshold value; edges in the aggregation graph with relative weight below it are dropped (0.0 keeps all edges)

1294:    Options Database Key:
1295: .  -pc_gamg_threshold

1297:    Level: intermediate

1299:    Concepts: Unstructured multigrid preconditioner

1301: .seealso: ()
1302: @*/
1303: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1304: {

1309:   PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1310:   return(0);
1311: }

1315: static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1316: {
1317:   PC_MG   *mg      = (PC_MG*)pc->data;
1318:   PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

1321:   pc_gamg->threshold = n;
1322:   return(0);
1323: }

1327: /*@
1328:    PCGAMGSetType - Set the AMG method type; calls the type's creation routine

1330:    Collective on PC

1332:    Input Parameters:
1333: +  pc - the preconditioner context
1334: -  type - the type of AMG method, e.g. PCGAMGAGG or PCGAMGGEO

1336:    Options Database Key:
1337: .  -pc_gamg_type

1339:    Level: intermediate

1341:    Concepts: Unstructured multigrid preconditioner

1343: .seealso: ()
1344: @*/
1345: PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
1346: {

1351:   PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));
1352:   return(0);
1353: }

1357: static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
1358: {
1359:   PetscErrorCode ierr,(*r)(PC);
1360:   PC_MG          *mg      = (PC_MG*)pc->data;
1361:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

1364:   PetscFunctionListFind(GAMGList,type,&r);
1365:   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1366:   if (pc_gamg->ops->destroy) {
1367:     (*pc_gamg->ops->destroy)(pc);
1368:     PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));
1369:   }
1370:   PetscFree(pc_gamg->gamg_type_name);
1371:   PetscStrallocpy(type,&pc_gamg->gamg_type_name);
1372:   (*r)(pc);
1373:   return(0);
1374: }
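
/*
   Editor's note: a usage sketch with a hypothetical pc handle (equivalent to
   -pc_gamg_type agg):

       PCGAMGSetType(pc, PCGAMGAGG);

   The name is looked up in GAMGList, which PCGAMGInitializePackage() below
   populates with PCGAMGGEO and PCGAMGAGG.
*/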

1378: PetscErrorCode PCSetFromOptions_GAMG(PC pc)
1379: {
1381:   PC_MG          *mg      = (PC_MG*)pc->data;
1382:   PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
1383:   PetscBool      flag;
1384:   PetscInt       two   = 2;
1385:   MPI_Comm       comm;

1388:   PetscObjectGetComm((PetscObject)pc,&comm);
1389:   PetscOptionsHead("GAMG options");
1390:   {
1391:     /* -pc_gamg_type */
1392:     {
1393:       char tname[256] = PCGAMGAGG;
1394:       const char *deftype = pc_gamg->gamg_type_name ? pc_gamg->gamg_type_name : tname;
1395:       PetscOptionsList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, tname, tname, sizeof(tname), &flag);
1396:       /* call PCCreateGAMG_XYZ */
1397:       if (flag || !pc_gamg->gamg_type_name) {
1398:         PCGAMGSetType(pc, flag ? tname : deftype);
1399:       }
1400:     }
1401:     /* -pc_gamg_verbose */
1402:     PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
1403:                            "none", pc_gamg->verbose,
1404:                            &pc_gamg->verbose, NULL);
1405:     /* -pc_gamg_repartition */
1406:     PetscOptionsBool("-pc_gamg_repartition",
1407:                             "Repartition coarse grids (false)",
1408:                             "PCGAMGRepartitioning",
1409:                             pc_gamg->repart,
1410:                             &pc_gamg->repart,
1411:                             &flag);
1412:     /* -pc_gamg_reuse_interpolation */
1413:     PetscOptionsBool("-pc_gamg_reuse_interpolation",
1414:                             "Reuse prolongation operator (true)",
1415:                             "PCGAMGReuseProl",
1416:                             pc_gamg->reuse_prol,
1417:                             &pc_gamg->reuse_prol,
1418:                             &flag);
1419:     /* -pc_gamg_use_agg_gasm */
1420:     PetscOptionsBool("-pc_gamg_use_agg_gasm",
1421:                             "Use aggregation aggregates for GASM smoother (false)",
1422:                             "PCGAMGUseASMAggs",
1423:                             pc_gamg->use_aggs_in_gasm,
1424:                             &pc_gamg->use_aggs_in_gasm,
1425:                             &flag);
1426:     /* -pc_gamg_process_eq_limit */
1427:     PetscOptionsInt("-pc_gamg_process_eq_limit",
1428:                            "Limit (goal) on number of equations per process on coarse grids",
1429:                            "PCGAMGSetProcEqLim",
1430:                            pc_gamg->min_eq_proc,
1431:                            &pc_gamg->min_eq_proc,
1432:                            &flag);
1433:     /* -pc_gamg_coarse_eq_limit */
1434:     PetscOptionsInt("-pc_gamg_coarse_eq_limit",
1435:                            "Limit on number of equations for the coarse grid",
1436:                            "PCGAMGSetCoarseEqLim",
1437:                            pc_gamg->coarse_eq_limit,
1438:                            &pc_gamg->coarse_eq_limit,
1439:                            &flag);
1440:     /* -pc_gamg_threshold */
1441:     PetscOptionsReal("-pc_gamg_threshold",
1442:                             "Relative threshold to use for dropping edges in aggregation graph",
1443:                             "PCGAMGSetThreshold",
1444:                             pc_gamg->threshold,
1445:                             &pc_gamg->threshold,
1446:                             &flag);
1447:     if (flag && pc_gamg->verbose) {
1448:       PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1449:     }
1450:     /* -pc_gamg_eigtarget */
1451:     PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);
1452:     PetscOptionsInt("-pc_mg_levels",
1453:                            "Set number of MG levels",
1454:                            "PCGAMGSetNlevels",
1455:                            pc_gamg->Nlevels,
1456:                            &pc_gamg->Nlevels,
1457:                            &flag);

1459:     /* set options for subtype */
1460:     if (pc_gamg->ops->setfromoptions) {(*pc_gamg->ops->setfromoptions)(pc);}
1461:   }
1462:   PetscOptionsTail();
1463:   return(0);
1464: }
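
/*
   Editor's note: a representative command line exercising the options parsed
   above (illustrative values, for a hypothetical application ./app):

       mpiexec -n 4 ./app -pc_type gamg -pc_gamg_type agg \
         -pc_gamg_threshold 0.05 -pc_gamg_process_eq_limit 100 \
         -pc_gamg_coarse_eq_limit 1000 -pc_gamg_repartition true \
         -pc_mg_levels 10 -pc_gamg_verbose 1
*/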

1466: /* -------------------------------------------------------------------------- */
1467: /*MC
1468:      PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
1469:        - This is the entry point to GAMG, registered in pcregis.c

1471:    Options Database Keys:
1472:    Multigrid options(inherited)
1473: +  -pc_mg_cycles <1>: 1 for V-cycle, 2 for W-cycle (PCMGSetCycleType)
1474: .  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1475: .  -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1476: -  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kaskade

1478:   Level: intermediate

1480:   Concepts: multigrid

1482: .seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1483:            PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1484:            PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1485:            PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1486:            PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1487: M*/
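
/*
   Editor's note: a minimal, self-contained usage sketch (a separate program,
   not part of gamg.c; PETSc 3.4 signatures assumed).  It assembles a 1D
   Laplacian and solves it with KSP + PCGAMG; guarded with #if 0 so this
   listing still compiles as-is:
*/
#if 0
#include <petscksp.h>
int main(int argc,char **argv)
{
  Mat            A;
  Vec            x,b;
  KSP            ksp;
  PC             pc;
  PetscInt       i,Istart,Iend,n = 1000;
  PetscScalar    v;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,NULL,NULL);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);
  for (i=Istart; i<Iend; i++) { /* standard [-1 2 -1] stencil */
    if (i > 0)   {v = -1.0; ierr = MatSetValue(A,i,i-1,v,INSERT_VALUES);CHKERRQ(ierr);}
    if (i < n-1) {v = -1.0; ierr = MatSetValue(A,i,i+1,v,INSERT_VALUES);CHKERRQ(ierr);}
    v = 2.0; ierr = MatSetValue(A,i,i,v,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatGetVecs(A,&x,&b);CHKERRQ(ierr);
  ierr = VecSet(b,1.0);CHKERRQ(ierr);

  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr); /* 3.4 signature */
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr); /* or use -pc_type gamg */
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
  ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
#endif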

1491: PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
1492: {
1494:   PC_GAMG        *pc_gamg;
1495:   PC_MG          *mg;
1496: #if defined PETSC_GAMG_USE_LOG
1497:   static long count = 0;
1498: #endif

1501:   /* PCGAMG is a derived class of PCMG; initialize pc as PCMG */
1502:   PCSetType(pc, PCMG); /* calls PCCreate_MG() and MGCreate_Private() */
1503:   PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);

1505:   /* create a supporting struct and attach it to pc */
1506:   PetscNewLog(pc, PC_GAMG, &pc_gamg);
1507:   mg           = (PC_MG*)pc->data;
1508:   mg->galerkin = 2;             /* Use Galerkin, but it is computed externally */
1509:   mg->innerctx = pc_gamg;

1511:   PetscNewLog(pc,struct _PCGAMGOps,&pc_gamg->ops);

1513:   pc_gamg->setup_count = 0;
1514:   /* these should be in subctx but repartitioning needs simple arrays */
1515:   pc_gamg->data_sz = 0;
1516:   pc_gamg->data    = 0;

1518:   /* register AMG type */
1519: #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
1520:   PCGAMGInitializePackage();
1521: #endif

1523:   /* overwrite the PCMG function pointers with the PCGAMG implementations */
1524:   pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1525:   pc->ops->setup          = PCSetUp_GAMG;
1526:   pc->ops->reset          = PCReset_GAMG;
1527:   pc->ops->destroy        = PCDestroy_GAMG;

1529:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);
1530:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);
1531:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);
1532:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseProl_C",PCGAMGSetReuseProl_GAMG);
1533:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);
1534:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);
1535:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);
1536:   PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);
1537:   pc_gamg->repart           = PETSC_FALSE;
1538:   pc_gamg->reuse_prol       = PETSC_FALSE;
1539:   pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1540:   pc_gamg->min_eq_proc      = 50;
1541:   pc_gamg->coarse_eq_limit  = 800;
1542:   pc_gamg->threshold        = 0.;
1543:   pc_gamg->Nlevels          = GAMG_MAXLEVELS;
1544:   pc_gamg->verbose          = 0;
1545:   pc_gamg->emax_id          = -1;
1546:   pc_gamg->eigtarget[0]     = 0.05;
1547:   pc_gamg->eigtarget[1]     = 1.05;

1549:   /* private events */
1550: #if defined PETSC_GAMG_USE_LOG
1551:   if (count++ == 0) {
1552:     PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1553:     PetscLogEventRegister("  Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1554:     /* PetscLogEventRegister("    G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1555:     /* PetscLogEventRegister("    G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1556:     /* PetscLogEventRegister("    G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1557:     PetscLogEventRegister("  MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1558:     PetscLogEventRegister("  geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1559:     PetscLogEventRegister("  geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1560:     PetscLogEventRegister("    search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1561:     PetscLogEventRegister("  SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1562:     PetscLogEventRegister("  SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1563:     PetscLogEventRegister("  SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1564:     PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1565:     PetscLogEventRegister("  repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1566:     PetscLogEventRegister("  Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1567:     PetscLogEventRegister("  Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1568:     PetscLogEventRegister("  Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);

1570:     /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1571:     /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1572:     /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1573:     /* create timer stages */
1574: #if defined GAMG_STAGES
1575:     {
1576:       char     str[32];
1577:       PetscInt lidx;
1578:       sprintf(str,"MG Level %d (finest)",0);
1579:       PetscLogStageRegister(str, &gamg_stages[0]);
1580:       for (lidx=1; lidx<9; lidx++) {
1581:         sprintf(str,"MG Level %d",lidx);
1582:         PetscLogStageRegister(str, &gamg_stages[lidx]);
1583:       }
1584:     }
1585: #endif
1586:   }
1587: #endif
1588:   /* general events */
1589: #if defined PETSC_USE_LOG
1590:   PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);
1591:   PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1592:   PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1593:   PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1594:   PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1595:   PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1596:   PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1597:   PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG);
1598: #endif

1600:   return(0);
1601: }

1605: /*@C
1606:  PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
1607:  from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
1608:  when using static libraries.

1610:  Level: developer

1612:  .keywords: PC, PCGAMG, initialize, package
1613:  .seealso: PetscInitialize()
1614: @*/
1615: PetscErrorCode PCGAMGInitializePackage(void)
1616: {

1620:   if (PCGAMGPackageInitialized) return(0);
1621:   PCGAMGPackageInitialized = PETSC_TRUE;
1622:   PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);
1623:   PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);
1624:   PetscRegisterFinalize(PCGAMGFinalizePackage);
1625:   return(0);
1626: }

1630: /*@C
1631:  PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is
1632:  called from PetscFinalize().

1634:  Level: developer

1636:  .keywords: Petsc, destroy, package
1637:  .seealso: PetscFinalize()
1638: @*/
1639: PetscErrorCode PCGAMGFinalizePackage(void)
1640: {

1644:   PCGAMGPackageInitialized = PETSC_FALSE;
1645:   PetscFunctionListDestroy(&GAMGList);
1646:   return(0);
1647: }