Actual source code: gamg.c
petsc-3.3-p7 2013-05-11
1: /*
2: GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3: */
4: #include "petsc-private/matimpl.h"
5: #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
6: #include <petsc-private/kspimpl.h>
8: #if defined PETSC_GAMG_USE_LOG
9: PetscLogEvent petsc_gamg_setup_events[NUM_SET];
10: #endif
12: #if defined PETSC_USE_LOG
13: PetscLogEvent PC_GAMGGgraph_AGG;
14: PetscLogEvent PC_GAMGGgraph_GEO;
15: PetscLogEvent PC_GAMGCoarsen_AGG;
16: PetscLogEvent PC_GAMGCoarsen_GEO;
17: PetscLogEvent PC_GAMGProlongator_AGG;
18: PetscLogEvent PC_GAMGProlongator_GEO;
19: PetscLogEvent PC_GAMGOptprol_AGG;
20: PetscLogEvent PC_GAMGKKTProl_AGG;
21: #endif
23: #define GAMG_MAXLEVELS 30
25: /* #define GAMG_STAGES */
26: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
27: static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
28: #endif
30: static PetscFList GAMGList = 0;
32: /* ----------------------------------------------------------------------------- */
35: PetscErrorCode PCReset_GAMG(PC pc)
36: {
37: PetscErrorCode ierr;
38: PC_MG *mg = (PC_MG*)pc->data;
39: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
42: if( pc_gamg->data ) { /* this should not happen, cleaned up in SetUp */
43: PetscFree( pc_gamg->data );
44: }
45: pc_gamg->data = PETSC_NULL; pc_gamg->data_sz = 0;
46: return(0);
47: }
49: /* private 2x2 Mat Nest for Stokes */
50: typedef struct{
51: Mat A11,A21,A12,Amat;
52: IS prim_is,constr_is;
53: }GAMGKKTMat;
57: static PetscErrorCode GAMGKKTMatCreate( Mat A, PetscBool iskkt, GAMGKKTMat *out )
58: {
60: out->Amat = A;
61: if( iskkt ){
62: PetscErrorCode ierr;
63: IS is_constraint, is_prime;
64: PetscInt nmin,nmax;
66: MatGetOwnershipRange( A, &nmin, &nmax );
67: MatFindZeroDiagonals( A, &is_constraint );
68: ISComplement( is_constraint, nmin, nmax, &is_prime );
69: out->prim_is = is_prime;
70: out->constr_is = is_constraint;
71:
72: MatGetSubMatrix( A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11);
73: MatGetSubMatrix( A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12);
74: MatGetSubMatrix( A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21);
75: PetscPrintf(((PetscObject)A)->comm,"[%d]%s N=%d N_11=%d\n",0,__FUNCT__,A->rmap->N,out->A11->rmap->N);
76: }
77: else {
78: out->A11 = A;
79: out->A21 = PETSC_NULL;
80: out->A12 = PETSC_NULL;
81: out->prim_is = PETSC_NULL;
82: out->constr_is = PETSC_NULL;
83: }
84: return(0);
85: }
89: static PetscErrorCode GAMGKKTMatDestroy( GAMGKKTMat *mat )
90: {
91: PetscErrorCode ierr;
94: if( mat->A11 && mat->A11 != mat->Amat ) {
95: MatDestroy( &mat->A11 );
96: }
97: MatDestroy( &mat->A21 );
98: MatDestroy( &mat->A12 );
100: ISDestroy( &mat->prim_is );
101: ISDestroy( &mat->constr_is );
103: return(0);
104: }
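A minimal usage sketch of the two helpers above (mirrors their use in PCSetUp_GAMG() below; 'Pmat' and 'stokes' are assumed to be in scope, and error checking is omitted):

    GAMGKKTMat kkt;
    GAMGKKTMatCreate( Pmat, stokes, &kkt );   /* splits Pmat into A11/A12/A21 when 'stokes' is true */
    /* ... coarsen kkt.A11 and build prolongators ... */
    GAMGKKTMatDestroy( &kkt );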
106: /* -------------------------------------------------------------------------- */
107: /*
108: createLevel: create the coarse operator with a Galerkin RAP product, then repartition
109: and/or reduce the number of active processors.
111: Input Parameter:
112: . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
113: 'pc_gamg->data_sz' are changed via repartitioning/reduction.
114: . Amat_fine - matrix on this fine (k) level
115: . cr_bs - coarse block size
116: . isLast - flag indicating the coarsest level is being created (forces reduction to one process)
117: . stokes - flag indicating a Stokes (KKT/saddle-point) system
118: In/Output Parameter:
119: . a_P_inout - prolongation operator to the next level (k-->k-1)
120: . a_nactive_proc - number of active procs
121: Output Parameter:
122: . a_Amat_crs - coarse matrix that is created (k-1)
123: */
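The coarse operator is built with a single Galerkin triple product, A_{k-1} = P^T A_k P. A minimal sketch of the call used below ('Afine' and 'P' are assumed to exist; the 2.0 is a fill-ratio hint):

    Mat Acoarse;
    MatPtAP( Afine, P, MAT_INITIAL_MATRIX, 2.0, &Acoarse );   /* Acoarse = P' * Afine * P */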
127: static PetscErrorCode createLevel( const PC pc,
128: const Mat Amat_fine,
129: const PetscInt cr_bs,
130: const PetscBool isLast,
131: const PetscBool stokes,
132: Mat *a_P_inout,
133: Mat *a_Amat_crs,
134: PetscMPIInt *a_nactive_proc
135: )
136: {
137: PetscErrorCode ierr;
138: PC_MG *mg = (PC_MG*)pc->data;
139: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
140: const PetscBool repart = pc_gamg->repart;
141: const PetscInt min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
142: Mat Cmat,Pold=*a_P_inout;
143: MPI_Comm wcomm = ((PetscObject)Amat_fine)->comm;
144: PetscMPIInt mype,npe,new_npe,nactive=*a_nactive_proc;
145: PetscInt ncrs_eq,ncrs_prim,f_bs;
148: MPI_Comm_rank( wcomm, &mype );
149: MPI_Comm_size( wcomm, &npe );
150: MatGetBlockSize( Amat_fine, &f_bs );
151: /* RAP */
152: MatPtAP( Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat );
154: /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations)*/
155: ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
156: assert(pc_gamg->data_sz%(pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)==0);
157: MatGetLocalSize( Cmat, &ncrs_eq, PETSC_NULL );
158:
159: /* compute the number of PEs to keep active, 'new_npe'; can be any integer in [1,P] */
160: {
161: PetscInt ncrs_eq_glob,ncrs_eq_ave;
162: MatGetSize( Cmat, &ncrs_eq_glob, PETSC_NULL );
163: ncrs_eq_ave = ncrs_eq_glob/npe;
164: new_npe = (PetscMPIInt)((float)ncrs_eq_ave/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
165: if( new_npe == 0 || ncrs_eq_ave < coarse_max ) new_npe = 1;
166: else if ( new_npe >= nactive ) new_npe = nactive; /* no change, rare */
167: if( isLast ) new_npe = 1;
168: }
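    /* Illustrative (hypothetical) numbers for the reduction above:
       ncrs_eq_glob = 100000 and npe = 64 give ncrs_eq_ave = 1562;
       with the default min_eq_proc = 100, new_npe = (int)(1562/100. + 0.5) = 16;
       1562 >= coarse_max (800 by default) so the grid is not collapsed to one
       process, and 16 < nactive, so this level will run on 16 of the 64 PEs. */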
170: if( !repart && new_npe==nactive ) {
171: *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
172: }
173: else {
174: const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
175: PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
176: IS is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
177: VecScatter vecscat;
178: PetscScalar *array;
179: Vec src_crd, dest_crd;
181: nloc_old = ncrs_eq/cr_bs; assert(ncrs_eq%cr_bs==0);
182: #if defined PETSC_GAMG_USE_LOG
183: PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);
184: #endif
185: /* make 'is_eq_newproc' */
186: PetscMalloc( npe*sizeof(PetscInt), &counts );
187: if( repart && !stokes ) {
188: /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
189: Mat adj;
191: if (pc_gamg->verbose>0) {
192: if (pc_gamg->verbose==1) PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,ncrs_eq);
193: else {
194: PetscInt n;
195: MPI_Allreduce( &ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, wcomm );
196: PetscPrintf(wcomm,"\t[%d]%s repartition: npe (active): %d --> %d, neq = %d\n",mype,__FUNCT__,*a_nactive_proc,new_npe,n);
197: }
198: }
200: /* get 'adj' */
201: if( cr_bs == 1 ) {
202: MatConvert( Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );
203: }
204: else{
205: /* make a scalar matrix to partition (no Stokes here) */
206: Mat tMat;
207: PetscInt Istart_crs,Iend_crs,ncols,jj,Ii;
208: const PetscScalar *vals;
209: const PetscInt *idx;
210: PetscInt *d_nnz, *o_nnz, M, N;
211: static PetscInt llev = 0;
212:
213: PetscMalloc( ncrs_prim*sizeof(PetscInt), &d_nnz );
214: PetscMalloc( ncrs_prim*sizeof(PetscInt), &o_nnz );
215: MatGetOwnershipRange( Cmat, &Istart_crs, &Iend_crs );
216: MatGetSize( Cmat, &M, &N );
217: for ( Ii = Istart_crs, jj = 0 ; Ii < Iend_crs ; Ii += cr_bs, jj++ ) {
218: MatGetRow(Cmat,Ii,&ncols,0,0);
219: d_nnz[jj] = ncols/cr_bs;
220: o_nnz[jj] = ncols/cr_bs;
221: MatRestoreRow(Cmat,Ii,&ncols,0,0);
222: if( d_nnz[jj] > ncrs_prim ) d_nnz[jj] = ncrs_prim;
223: if( o_nnz[jj] > (M/cr_bs-ncrs_prim) ) o_nnz[jj] = M/cr_bs-ncrs_prim;
224: }
226: MatCreate( wcomm, &tMat );
227: MatSetSizes( tMat, ncrs_prim, ncrs_prim,
228: PETSC_DETERMINE, PETSC_DETERMINE );
229:
230: MatSetType(tMat,MATAIJ);
231: MatSeqAIJSetPreallocation(tMat,0,d_nnz);
232: MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);
233: PetscFree( d_nnz );
234: PetscFree( o_nnz );
236: for ( ii = Istart_crs; ii < Iend_crs; ii++ ) {
237: PetscInt dest_row = ii/cr_bs;
238: MatGetRow(Cmat,ii,&ncols,&idx,&vals);
239: for( jj = 0 ; jj < ncols ; jj++ ){
240: PetscInt dest_col = idx[jj]/cr_bs;
241: PetscScalar v = 1.0;
242: MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);
243: }
244: MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);
245: }
246: MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);
247: MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);
248:
249: if( llev++ == -1 ) {
250: PetscViewer viewer; char fname[32];
251: PetscSNPrintf(fname,sizeof fname,"part_mat_%D.mat",llev);
252: PetscViewerBinaryOpen(wcomm,fname,FILE_MODE_WRITE,&viewer);
253: MatView( tMat, viewer );
254: PetscViewerDestroy( &viewer );
255: }
257: MatConvert( tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj );
259: MatDestroy( &tMat );
260: } /* create 'adj' */
262: { /* partition: get newproc_idx */
263: char prefix[256];
264: const char *pcpre;
265: const PetscInt *is_idx;
266: MatPartitioning mpart;
267: IS proc_is;
268: PetscInt targetPE;
269:
270: MatPartitioningCreate( wcomm, &mpart );
271: MatPartitioningSetAdjacency( mpart, adj );
272: PCGetOptionsPrefix( pc, &pcpre );
273: PetscSNPrintf(prefix,sizeof prefix,"%spc_gamg_",pcpre?pcpre:"");
274: PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);
275: MatPartitioningSetFromOptions( mpart );
276: MatPartitioningSetNParts( mpart, new_npe );
277: MatPartitioningApply( mpart, &proc_is );
278: MatPartitioningDestroy( &mpart );
279:
280: /* collect IS info */
281: PetscMalloc( ncrs_eq*sizeof(PetscInt), &newproc_idx );
282: ISGetIndices( proc_is, &is_idx );
283: targetPE = 1; /* bring to "front" of machine */
284: /*targetPE = npe/new_npe;*/ /* spread partitioning across machine */
285: for( kk = jj = 0 ; kk < nloc_old ; kk++ ){
286: for( ii = 0 ; ii < cr_bs ; ii++, jj++ ){
287: newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
288: }
289: }
290: ISRestoreIndices( proc_is, &is_idx );
291: ISDestroy( &proc_is );
292: }
293: MatDestroy( &adj );
295: ISCreateGeneral( wcomm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc );
296:
297: if( newproc_idx != 0 ) {
298: PetscFree( newproc_idx );
299: }
300: } /* repartitioning */
301: else { /* simple aggregation of parts -- 'is_eq_newproc' */
303: PetscInt rfactor,targetPE;
304: /* find factor */
305: if( new_npe == 1 ) rfactor = npe; /* easy */
306: else {
307: PetscReal best_fact = 0.;
308: jj = -1;
309: for( kk = 1 ; kk <= npe ; kk++ ){
310: if( npe%kk==0 ) { /* a candidate */
311: PetscReal nactpe = (PetscReal)npe/(PetscReal)kk, fact = nactpe/(PetscReal)new_npe;
312: if(fact > 1.0) fact = 1./fact; /* keep fact < 1 */
313: if( fact > best_fact ) {
314: best_fact = fact; jj = kk;
315: }
316: }
317: }
318: if( jj != -1 ) rfactor = jj;
319: else rfactor = 1; /* defensive: kk=1 always qualifies, so this should not be reached */
320: }
321: new_npe = npe/rfactor;
323: if( new_npe==nactive ) {
324: *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
325: PetscFree( counts );
326: if (pc_gamg->verbose>0){
327: PetscPrintf(wcomm,"\t[%d]%s aggregate processors noop: new_npe=%d, neq(loc)=%d\n",mype,__FUNCT__,new_npe,ncrs_eq);
328: }
329: return(0);
330: }
332: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",mype,__FUNCT__,ncrs_eq);
333: targetPE = mype/rfactor;
334: ISCreateStride( wcomm, ncrs_eq, targetPE, 0, &is_eq_newproc );
336: if( stokes ) {
337: ISCreateStride( wcomm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim );
338: }
339: } /* end simple 'is_eq_newproc' */
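    /* Illustrative (hypothetical) factor search: npe = 12 and a requested
       new_npe = 5. The divisors kk of 12 give nactpe = 12/kk = 12,6,4,3,2,1
       and fact (clipped below 1) = 5/12, 5/6, 4/5, 3/5, 2/5, 1/5; the best is
       kk = 2, so rfactor = 2 and new_npe = 12/2 = 6, the divisor-constrained
       process count closest to the requested 5. */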
341: /*
342: Create an index set from the is_eq_newproc index set to indicate the mapping TO
343: */
344: ISPartitioningToNumbering( is_eq_newproc, &is_eq_num );
345: if( stokes ) {
346: ISPartitioningToNumbering( is_eq_newproc_prim, &is_eq_num_prim );
347: }
348: else is_eq_num_prim = is_eq_num;
349: /*
350: Determine how many equations/vertices are assigned to each processor
351: */
352: ISPartitioningCount( is_eq_newproc, npe, counts );
353: ncrs_eq_new = counts[mype];
354: ISDestroy( &is_eq_newproc );
355: if( stokes ) {
356: ISPartitioningCount( is_eq_newproc_prim, npe, counts );
357: ISDestroy( &is_eq_newproc_prim );
358: ncrs_prim_new = counts[mype]/cr_bs; /* nodes */
359: }
360: else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */
362: PetscFree( counts );
363: #if defined PETSC_GAMG_USE_LOG
364: PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);
365: #endif
367: /* move data (for primal equations only) */
368: /* Create a vector to contain the newly ordered element information */
369: VecCreate( wcomm, &dest_crd );
370: VecSetSizes( dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE );
371: VecSetFromOptions( dest_crd ); /* this is needed! */
372: /*
373: There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
374: a block size of 'node_data_sz'). Note, ISs are expanded into equation space by 'cr_bs'.
375: */
376: PetscMalloc( (ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx );
377: ISGetIndices( is_eq_num_prim, &idx );
378: for(ii=0,jj=0; ii<ncrs_prim ; ii++) {
379: PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
380: for( kk=0; kk<node_data_sz ; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
381: }
382: ISRestoreIndices( is_eq_num_prim, &idx );
383: ISCreateGeneral( wcomm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat );
384:
385: PetscFree( tidx );
386: /*
387: Create a vector to contain the original vertex information for each element
388: */
389: VecCreateSeq( PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd );
390: for( jj=0; jj<ndata_cols ; jj++ ) {
391: const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
392: for( ii=0 ; ii<ncrs_prim ; ii++) {
393: for( kk=0; kk<ndata_rows ; kk++ ) {
394: PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
395: PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
396: VecSetValues( src_crd, 1, &jx, &tt, INSERT_VALUES );
397: }
398: }
399: }
400: VecAssemblyBegin(src_crd);
401: VecAssemblyEnd(src_crd);
402: /*
403: Scatter the element vertex information (still in the original vertex ordering)
404: to the correct processor
405: */
406: VecScatterCreate( src_crd, PETSC_NULL, dest_crd, isscat, &vecscat);
407:
408: ISDestroy( &isscat );
409: VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
410: VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);
411: VecScatterDestroy( &vecscat );
412: VecDestroy( &src_crd );
413: /*
414: Put the element vertex data into a new allocation of the gdata->ele
415: */
416: PetscFree( pc_gamg->data );
417: PetscMalloc( node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data );
418: pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
419: strideNew = ncrs_prim_new*ndata_rows;
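    /* Layout note with illustrative (hypothetical) sizes: ndata_rows = 2,
       ndata_cols = 3 (node_data_sz = 6), ncrs_prim_new = 10 (strideNew = 20).
       The copy below takes vector entry jx = ii*6 + kk*3 + jj (all data for
       node ii packed contiguously, as delivered by the scatter) and stores it
       at ix = ii*2 + kk + jj*20, i.e. each of the 3 data columns is stored
       contiguously across all nodes in pc_gamg->data. */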
420: VecGetArray( dest_crd, &array );
421: for( jj=0; jj<ndata_cols ; jj++ ) {
422: for( ii=0 ; ii<ncrs_prim_new ; ii++) {
423: for( kk=0; kk<ndata_rows ; kk++ ) {
424: PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
425: pc_gamg->data[ix] = PetscRealPart(array[jx]);
426: }
427: }
428: }
429: VecRestoreArray( dest_crd, &array );
430: VecDestroy( &dest_crd );
432: /* move A and P (columns) with new layout */
433: #if defined PETSC_GAMG_USE_LOG
434: PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);
435: #endif
437: /*
438: Invert for MatGetSubMatrix
439: */
440: ISInvertPermutation( is_eq_num, ncrs_eq_new, &new_eq_indices );
441: ISSort( new_eq_indices ); /* is this needed? */
442: ISSetBlockSize( new_eq_indices, cr_bs );
443: if(is_eq_num != is_eq_num_prim) {
444: ISDestroy( &is_eq_num_prim ); /* could be same as 'is_eq_num' */
445: }
446: ISDestroy( &is_eq_num );
447: #if defined PETSC_GAMG_USE_LOG
448: PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);
449: PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);
450: #endif
451: /* 'a_Amat_crs' output */
452: {
453: Mat mat;
454: MatGetSubMatrix( Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat );
455:
456: *a_Amat_crs = mat;
458: if(!PETSC_TRUE){
459: PetscInt cbs, rbs;
460: MatGetBlockSizes( Cmat, &rbs, &cbs );
461: PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
462: MatGetBlockSizes( mat, &rbs, &cbs );
463: PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",mype,__FUNCT__,rbs,cbs,cr_bs);
464: }
465: }
466: MatDestroy( &Cmat );
468: #if defined PETSC_GAMG_USE_LOG
469: PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);
470: #endif
471: /* prolongator */
472: {
473: IS findices;
474: PetscInt Istart,Iend;
475: Mat Pnew;
476: MatGetOwnershipRange( Pold, &Istart, &Iend );
477: #if defined PETSC_GAMG_USE_LOG
478: PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);
479: #endif
480: ISCreateStride(wcomm,Iend-Istart,Istart,1,&findices);
481: ISSetBlockSize(findices,f_bs);
482: MatGetSubMatrix( Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew );
483:
484: ISDestroy( &findices );
486: if(!PETSC_TRUE){
487: PetscInt cbs, rbs;
488: MatGetBlockSizes( Pold, &rbs, &cbs );
489: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
490: MatGetBlockSizes( Pnew, &rbs, &cbs );
491: PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",mype,__FUNCT__,rbs,cbs);
492: }
493: #if defined PETSC_GAMG_USE_LOG
494: PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);
495: #endif
496: MatDestroy( a_P_inout );
498: /* output - repartitioned */
499: *a_P_inout = Pnew;
500: }
501: ISDestroy( &new_eq_indices );
503: *a_nactive_proc = new_npe; /* output */
504: }
506: /* output matrix data */
507: if( !PETSC_TRUE ) {
508: PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
509: if(llev==0) {
510: sprintf(fname,"Cmat_%d.m",llev++);
511: PetscViewerASCIIOpen(wcomm,fname,&viewer);
512: PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);
513: MatView(Amat_fine, viewer );
514: PetscViewerDestroy( &viewer );
515: }
516: sprintf(fname,"Cmat_%d.m",llev++);
517: PetscViewerASCIIOpen(wcomm,fname,&viewer);
518: PetscViewerSetFormat( viewer, PETSC_VIEWER_ASCII_MATLAB);
519: MatView(Cmat, viewer );
520: PetscViewerDestroy( &viewer );
521: }
523: return(0);
524: }
526: /* -------------------------------------------------------------------------- */
527: /*
528: PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
529: by setting data structures and options.
531: Input Parameter:
532: . pc - the preconditioner context
534: Application Interface Routine: PCSetUp()
536: Notes:
537: The interface routine PCSetUp() is not usually called directly by
538: the user, but instead is called by PCApply() if necessary.
539: */
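PCSetUp_GAMG() is reached through the usual KSP setup path; a minimal, hypothetical driver ('A', 'b', 'x' assumed assembled, error checking omitted):

    KSP ksp; PC pc;
    KSPCreate( PETSC_COMM_WORLD, &ksp );
    KSPSetOperators( ksp, A, A, SAME_NONZERO_PATTERN );
    KSPGetPC( ksp, &pc );
    PCSetType( pc, PCGAMG );
    KSPSetFromOptions( ksp );
    KSPSolve( ksp, b, x );   /* triggers PCSetUp() and hence PCSetUp_GAMG() */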
542: PetscErrorCode PCSetUp_GAMG( PC pc )
543: {
544: PetscErrorCode ierr;
545: PC_MG *mg = (PC_MG*)pc->data;
546: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
547: Mat Pmat = pc->pmat;
548: PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
549: MPI_Comm wcomm = ((PetscObject)pc)->comm;
550: PetscMPIInt mype,npe,nactivepe;
551: Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
552: PetscReal emaxs[GAMG_MAXLEVELS];
553: IS *ASMLocalIDsArr[GAMG_MAXLEVELS],removedEqs[GAMG_MAXLEVELS];
554: PetscInt level_bs[GAMG_MAXLEVELS];
555: GAMGKKTMat kktMatsArr[GAMG_MAXLEVELS];
556: PetscLogDouble nnz0=0.,nnztot=0.;
557: MatInfo info;
558: PetscBool stokes = PETSC_FALSE;
559:
561: MPI_Comm_rank(wcomm,&mype);
562: MPI_Comm_size(wcomm,&npe);
563: if (pc_gamg->verbose>2) PetscPrintf(wcomm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",mype,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);
564: if( pc_gamg->setup_count++ > 0 ) {
565: PC_MG_Levels **mglevels = mg->levels;
566: /* just do Galerkin grids */
567: Mat B,dA,dB;
568: assert(pc->setupcalled);
570: if( pc_gamg->Nlevels > 1 ) {
571: /* currently only handle case where mat and pmat are the same on coarser levels */
572: KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,PETSC_NULL);
573: /* (re)set to get dirty flag */
574: KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);
575:
576: for (level=pc_gamg->Nlevels-2; level>-1; level--) {
577: /* the first time through, the matrix structure has changed due to repartitioning */
578: if( pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
579: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);
580: MatDestroy(&mglevels[level]->A);
581: mglevels[level]->A = B;
582: }
583: else {
584: KSPGetOperators(mglevels[level]->smoothd,PETSC_NULL,&B,PETSC_NULL);
585: MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);
586: }
587: KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);
588: dB = B;
589: }
590: }
592: PCSetUp_MG( pc );CHKERRQ( ierr );
594: /* PCSetUp_MG seems to insist on setting this to GMRES */
595: KSPSetType( mglevels[0]->smoothd, KSPPREONLY );
597: return(0);
598: }
599: assert(pc->setupcalled == 0);
601: PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,PETSC_NULL);
603: GAMGKKTMatCreate( Pmat, stokes, &kktMatsArr[0] );
605: if( pc_gamg->data == 0 ) {
606: if( !pc_gamg->createdefaultdata ){
607: SETERRQ(wcomm,PETSC_ERR_LIB,"'createdefaultdata' not set(?) need to support NULL data");
608: }
609: if( stokes ) {
610: SETERRQ(wcomm,PETSC_ERR_LIB,"Need data (eg, PCSetCoordinates) for Stokes problems");
611: }
612: pc_gamg->createdefaultdata( pc, kktMatsArr[0].A11 );
613: }
615: /* get basic dims */
616: if( stokes ) {
617: bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */
618: }
619: else {
620: MatGetBlockSize( Pmat, &bs );
621: }
622:
623: MatGetSize( Pmat, &M, &qq );
624: if (pc_gamg->verbose) {
625: if(pc_gamg->verbose==1) MatGetInfo(Pmat,MAT_LOCAL,&info);
626: else MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);
627:
628: nnz0 = info.nz_used;
629: nnztot = info.nz_used;
630: PetscPrintf(wcomm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
631: mype,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
632: (int)(nnz0/(PetscReal)M),npe);
633: }
635: /* Get A_i and R_i */
636: for ( level=0, Aarr[0]=Pmat, nactivepe = npe; /* hard wired stopping logic */
637: level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (npe==1 || nactivepe>1); */
638: level++ ){
639: level1 = level + 1;
640: #if defined PETSC_GAMG_USE_LOG
641: PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);
642: #if (defined GAMG_STAGES)
643: PetscLogStagePush(gamg_stages[level]); CHKERRQ( ierr );
644: #endif
645: #endif
646: /* deal with Stokes, get sub matrices */
647: if( level > 0 ) {
648: GAMGKKTMatCreate( Aarr[level], stokes, &kktMatsArr[level] );
649: }
650: { /* construct prolongator */
651: Mat Gmat;
652: PetscCoarsenData *agg_lists;
653: Mat Prol11,Prol22;
655: level_bs[level] = bs;
656: pc_gamg->graph( pc,kktMatsArr[level].A11, &Gmat );
657: pc_gamg->coarsen( pc, &Gmat, &agg_lists );
658: pc_gamg->prolongator( pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11 );
660: /* could have failed to create new level */
661: if( Prol11 ){
662: /* get new block size of coarse matrices */
663: MatGetBlockSizes( Prol11, PETSC_NULL, &bs );
665: if( stokes ) {
666: if(!pc_gamg->formkktprol) SETERRQ(wcomm,PETSC_ERR_USER,"Stokes not supported by AMG method.");
667: /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */
668: pc_gamg->formkktprol( pc, Prol11, kktMatsArr[level].A21, &Prol22 );
669: }
670:
671: if( pc_gamg->optprol ){
672: /* smooth */
673: pc_gamg->optprol( pc, kktMatsArr[level].A11, &Prol11 );
674: }
675:
676: if( stokes ) {
677: IS is_row[2];
678: Mat a[4];
679: is_row[0] = kktMatsArr[level].prim_is;
680: is_row[1] = kktMatsArr[level].constr_is;
681: a[0] = Prol11; a[1] = PETSC_NULL;
682: a[2] = PETSC_NULL; a[3] = Prol22;
683: MatCreateNest(wcomm,2,is_row, 2, is_row, a, &Parr[level1] );
684: }
685: else {
686: Parr[level1] = Prol11;
687: }
688: }
689: else Parr[level1] = PETSC_NULL;
691: if ( pc_gamg->use_aggs_in_gasm ) {
692: PetscCDGetASMBlocks(agg_lists, level_bs[level], &nASMBlocksArr[level], &ASMLocalIDsArr[level] );
693: }
695: PetscCDGetRemovedIS( agg_lists, &removedEqs[level] );
697: MatDestroy( &Gmat );
698: PetscCDDestroy( agg_lists );
699: } /* construct prolongator scope */
700: #if defined PETSC_GAMG_USE_LOG
701: PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);
702: #endif
703: /* cache eigen estimate */
704: if( pc_gamg->emax_id != -1 ){
705: PetscBool flag;
706: PetscObjectComposedDataGetReal( (PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag );
707: CHKERRQ( ierr );
708: if( !flag ) emaxs[level] = -1.;
709: }
710: else emaxs[level] = -1.;
711: if(level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
712: if( !Parr[level1] ) {
713: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s stop gridding, level %d\n",mype,__FUNCT__,level);
714: break;
715: }
716: #if defined PETSC_GAMG_USE_LOG
717: PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);
718: #endif
720: createLevel( pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
721: stokes, &Parr[level1], &Aarr[level1], &nactivepe );
722:
724: #if defined PETSC_GAMG_USE_LOG
725: PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);
726: #endif
727: MatGetSize( Aarr[level1], &M, &qq );
729: if (pc_gamg->verbose > 0){
730: PetscInt NN = M;
731: if(pc_gamg->verbose==1) {
732: MatGetInfo(Aarr[level1],MAT_LOCAL,&info);
733: MatGetLocalSize( Aarr[level1], &NN, &qq );
734: }
735: else MatGetInfo( Aarr[level1], MAT_GLOBAL_SUM, &info );
737:
738: nnztot += info.nz_used;
739: PetscPrintf(wcomm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
740: mype,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
741: (int)(info.nz_used/(PetscReal)NN), nactivepe );
742:
743: }
745: /* stop if one node -- could pull back for singular problems */
746: if( M/pc_gamg->data_cell_cols < 2 ) {
747: level++;
748: break;
749: }
750: #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
751: PetscLogStagePop(); CHKERRQ( ierr );
752: #endif
753: } /* levels */
755: if( pc_gamg->data ) {
756: PetscFree( pc_gamg->data ); CHKERRQ( ierr );
757: pc_gamg->data = PETSC_NULL;
758: }
760: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
761: pc_gamg->Nlevels = level + 1;
762: fine_level = level;
763: PCMGSetLevels(pc,pc_gamg->Nlevels,PETSC_NULL);
765: /* simple setup */
766: if( !PETSC_TRUE ){
767: PC_MG_Levels **mglevels = mg->levels;
768: for (lidx=0,level=pc_gamg->Nlevels-1;
769: lidx<fine_level;
770: lidx++, level--){
771: PCMGSetInterpolation( pc, lidx+1, Parr[level] );
772: KSPSetOperators( mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );
773: MatDestroy( &Parr[level] );
774: MatDestroy( &Aarr[level] );
775: }
776: KSPSetOperators( mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );
777:
778: PCSetUp_MG( pc ); CHKERRQ( ierr );
779: }
780: else if( pc_gamg->Nlevels > 1 ) { /* don't setup MG if one level */
781: /* set default smoothers & set operators */
782: for ( lidx = 1, level = pc_gamg->Nlevels-2;
783: lidx <= fine_level;
784: lidx++, level--) {
785: KSP smoother;
786: PC subpc;
788: PCMGGetSmoother( pc, lidx, &smoother );
789: KSPGetPC( smoother, &subpc );
790:
791: KSPSetNormType( smoother, KSP_NORM_NONE );
792: /* set ops */
793: KSPSetOperators( smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN );
794: PCMGSetInterpolation( pc, lidx, Parr[level+1] );
796: /* create field split PC, get subsmoother */
797: if( stokes ) {
798: KSP *ksps;
799: PetscInt nn;
800: PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is);
801: PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is);
802: PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
803: smoother = ksps[0];
804: KSPGetPC( smoother, &subpc );
805: PetscFree( ksps );
806: }
807: GAMGKKTMatDestroy( &kktMatsArr[level] );
809: /* set defaults */
810: KSPSetType( smoother, KSPCHEBYSHEV );
812: /* override defaults and command line args (!) */
813: if ( pc_gamg->use_aggs_in_gasm ) {
814: PetscInt sz;
815: IS *is;
817: sz = nASMBlocksArr[level];
818: is = ASMLocalIDsArr[level];
819: PCSetType( subpc, PCGASM );
820: if(sz==0){
821: IS is;
822: PetscInt my0,kk;
823: MatGetOwnershipRange( Aarr[level], &my0, &kk );
824: ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is );
825: PCGASMSetSubdomains( subpc, 1, &is, PETSC_NULL );
826: ISDestroy( &is );
827: }
828: else {
829: PetscInt kk;
830: PCGASMSetSubdomains( subpc, sz, is, PETSC_NULL );
831: for(kk=0;kk<sz;kk++){
832: ISDestroy( &is[kk] );
833: }
834: PetscFree( is );
835: }
836: PCGASMSetOverlap( subpc, 0 );
838: ASMLocalIDsArr[level] = PETSC_NULL;
839: nASMBlocksArr[level] = 0;
840: PCGASMSetType( subpc, PC_GASM_BASIC );
841: }
842: else {
843: PCSetType( subpc, PCJACOBI );
844: }
845: }
846: {
847: /* coarse grid */
848: KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
849: Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
850: PCMGGetSmoother( pc, lidx, &smoother );
851: KSPSetOperators( smoother, Lmat, Lmat, SAME_NONZERO_PATTERN );
852: KSPSetNormType( smoother, KSP_NORM_NONE );
853: KSPGetPC( smoother, &subpc );
854: PCSetType( subpc, PCBJACOBI );
855: PCSetUp( subpc );
856: PCBJacobiGetSubKSP(subpc,&ii,&first,&k2); assert(ii==1);
857: KSPGetPC(k2[0],&pc2);
858: PCSetType( pc2, PCLU );
859: }
861: /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
862: PetscObjectOptionsBegin( (PetscObject)pc );
863: PCSetFromOptions_MG( pc );
864: PetscOptionsEnd();
865: if (mg->galerkin != 2) SETERRQ(wcomm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");
867: /* create cheby smoothers */
868: for ( lidx = 1, level = pc_gamg->Nlevels-2;
869: lidx <= fine_level;
870: lidx++, level--) {
871: KSP smoother;
872: PetscBool flag;
873: PC subpc;
875: PCMGGetSmoother( pc, lidx, &smoother );
876: KSPGetPC( smoother, &subpc );
878: /* create field split PC, get subsmoother */
879: if( stokes ) {
880: KSP *ksps;
881: PetscInt nn;
882: PCFieldSplitGetSubKSP(subpc,&nn,&ksps);
883: smoother = ksps[0];
884: KSPGetPC( smoother, &subpc );
885: PetscFree( ksps );
886: }
888: /* do my own cheby */
889: PetscObjectTypeCompare( (PetscObject)smoother, KSPCHEBYSHEV, &flag );
890: if( flag ) {
891: PetscReal emax, emin;
892: PetscObjectTypeCompare( (PetscObject)subpc, PCJACOBI, &flag );
893: if( flag && emaxs[level] > 0.0 ) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
894: else{ /* eigen estimate 'emax' */
895: KSP eksp; Mat Lmat = Aarr[level];
896: Vec bb, xx;
898: MatGetVecs( Lmat, &bb, 0 );
899: MatGetVecs( Lmat, &xx, 0 );
900: {
901: PetscRandom rctx;
902: PetscRandomCreate(wcomm,&rctx);
903: PetscRandomSetFromOptions(rctx);
904: VecSetRandom(bb,rctx);
905: PetscRandomDestroy( &rctx );
906: }
908: if( removedEqs[level] ) {
909: /* being very careful - zeroing out BC rows (this is not done in agg.c estimates) */
910: PetscScalar *zeros;
911: PetscInt ii,jj, *idx_bs, sz, bs=level_bs[level];
912: const PetscInt *idx;
913: ISGetLocalSize( removedEqs[level], &sz );
914: PetscMalloc( bs*sz*sizeof(PetscScalar), &zeros );
915: for(ii=0;ii<bs*sz;ii++) zeros[ii] = 0.;
916: PetscMalloc( bs*sz*sizeof(PetscInt), &idx_bs );
917: ISGetIndices( removedEqs[level], &idx);
918: for(ii=0;ii<sz;ii++) {
919: for(jj=0;jj<bs;jj++) {
920: idx_bs[ii*bs+jj] = bs*idx[ii]+jj;
921: }
922: }
923: ISRestoreIndices( removedEqs[level], &idx );
924: if( sz > 0 ) {
925: VecSetValues( bb, bs*sz, idx_bs, zeros, INSERT_VALUES );
926: }
927: PetscFree( idx_bs );
928: PetscFree( zeros );
929: VecAssemblyBegin(bb);
930: VecAssemblyEnd(bb);
931: }
932: KSPCreate( wcomm, &eksp );
933: KSPSetTolerances( eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10 );
934:
935: KSPSetNormType( eksp, KSP_NORM_NONE );
936: KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);
937: KSPAppendOptionsPrefix( eksp, "gamg_est_");
938: KSPSetFromOptions( eksp );
940: KSPSetInitialGuessNonzero( eksp, PETSC_FALSE );
941: KSPSetOperators( eksp, Lmat, Lmat, SAME_NONZERO_PATTERN ); CHKERRQ( ierr );
942: KSPSetComputeSingularValues( eksp,PETSC_TRUE );
944: /* set PC type to be same as smoother */
945: KSPSetPC( eksp, subpc ); CHKERRQ( ierr );
947: /* solve - keep stuff out of logging */
948: PetscLogEventDeactivate(KSP_Solve);
949: PetscLogEventDeactivate(PC_Apply);
950: KSPSolve( eksp, bb, xx );
951: PetscLogEventActivate(KSP_Solve);
952: PetscLogEventActivate(PC_Apply);
953:
954: KSPComputeExtremeSingularValues( eksp, &emax, &emin );
955:
956: VecDestroy( &xx );
957: VecDestroy( &bb );
958: KSPDestroy( &eksp );
959:
960: if( pc_gamg->verbose > 0 ) {
961: PetscInt N1, tt;
962: MatGetSize( Aarr[level], &N1, &tt );
963: PetscPrintf(wcomm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
964: }
965: }
966: {
967: PetscInt N1, N0;
968: MatGetSize( Aarr[level], &N1, PETSC_NULL );
969: MatGetSize( Aarr[level+1], &N0, PETSC_NULL );
970: /* heuristic: estimate emin from the coarsening ratio; accuracy is questionable */
971: emin = 1.*emax/((PetscReal)N1/(PetscReal)N0);
972: emax *= 1.05;
973: }
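    /* Illustrative (hypothetical) numbers: with emax = 2.0 from the estimate
       above and N1/N0 = 8 (8x coarsening), the heuristic gives
       emin = 2.0/8 = 0.25 and emax = 2.1 after the 5% safety factor, so the
       call below is KSPChebyshevSetEigenvalues( smoother, 2.1, 0.25 ). */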
974: KSPChebyshevSetEigenvalues( smoother, emax, emin );
975: } /* setup cheby flag */
977: if( removedEqs[level] ) {
978: ISDestroy( &removedEqs[level] );
979: }
980: } /* non-coarse levels */
981:
982: /* clean up */
983: for(level=1;level<pc_gamg->Nlevels;level++){
984: MatDestroy( &Parr[level] );
985: MatDestroy( &Aarr[level] );
986: }
988: PCSetUp_MG( pc );CHKERRQ( ierr );
989:
990: if( PETSC_FALSE ){
991: KSP smoother; /* PCSetUp_MG seems to insist on setting this to GMRES on coarse grid */
992: PCMGGetSmoother( pc, 0, &smoother );
993: KSPSetType( smoother, KSPPREONLY );
994: }
995: }
996: else {
997: KSP smoother;
998: if (pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",mype,__FUNCT__);
999: PCMGGetSmoother( pc, 0, &smoother );
1000: KSPSetOperators( smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN );
1001: KSPSetType( smoother, KSPPREONLY );
1002: PCSetUp_MG( pc );CHKERRQ( ierr );
1003: }
1005: return(0);
1006: }
1008: /* ------------------------------------------------------------------------- */
1009: /*
1010: PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
1011: that was created with PCCreate_GAMG().
1013: Input Parameter:
1014: . pc - the preconditioner context
1016: Application Interface Routine: PCDestroy()
1017: */
1020: PetscErrorCode PCDestroy_GAMG( PC pc )
1021: {
1022: PetscErrorCode ierr;
1023: PC_MG *mg = (PC_MG*)pc->data;
1024: PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx;
1027: PCReset_GAMG( pc );
1028: PetscFree( pc_gamg );
1029: PCDestroy_MG( pc );
1030: return(0);
1031: }
1036: /*@
1037: PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
1038: processor reduction.
1040: Not Collective on PC
1042: Input Parameters:
1043: + pc - the preconditioner context
1044: - n - the number of equations (goal) per process on coarse grids
1046: Options Database Key:
1047: . -pc_gamg_process_eq_limit
1049: Level: intermediate
1051: Concepts: Unstructured multigrid preconditioner
1053: .seealso: ()
1054: @*/
1055: PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
1056: {
1058:
1061: PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));
1062: return(0);
1063: }
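A minimal usage sketch ('pc' assumed to be a PCGAMG preconditioner; 200 is an illustrative value):

    PCGAMGSetProcEqLim( pc, 200 );   /* aim for at least ~200 equations per process on coarse grids */

or, equivalently, on the command line: -pc_gamg_process_eq_limit 200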
1065: EXTERN_C_BEGIN
1068: PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
1069: {
1070: PC_MG *mg = (PC_MG*)pc->data;
1071: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1072:
1074: if(n>0) pc_gamg->min_eq_proc = n;
1075: return(0);
1076: }
1077: EXTERN_C_END
1081: /*@
1082: PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.
1084: Collective on PC
1086: Input Parameters:
1087: + pc - the preconditioner context
1088: - n - the maximum number of equations on the coarsest grid
1090: Options Database Key:
1091: . -pc_gamg_coarse_eq_limit
1093: Level: intermediate
1095: Concepts: Unstructured multigrid preconditioner
1097: .seealso: ()
1098: @*/
1099: PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
1100: {
1102:
1105: PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));
1106: return(0);
1107: }
1109: EXTERN_C_BEGIN
1112: PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
1113: {
1114: PC_MG *mg = (PC_MG*)pc->data;
1115: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1116:
1118: if(n>0) pc_gamg->coarse_eq_limit = n;
1119: return(0);
1120: }
1121: EXTERN_C_END
1125: /*@
1126: PCGAMGSetRepartitioning - Repartition the coarse grids
1128: Collective on PC
1130: Input Parameters:
1131: + pc - the preconditioner context
1132: - n - PETSC_TRUE to repartition the coarse grids
1134: Options Database Key:
1135: . -pc_gamg_repartition
1137: Level: intermediate
1139: Concepts: Unstructured multigrid preconditioner
1141: .seealso: ()
1142: @*/
1143: PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
1144: {
1146:
1149: PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));
1150: return(0);
1151: }
1153: EXTERN_C_BEGIN
1156: PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
1157: {
1158: PC_MG *mg = (PC_MG*)pc->data;
1159: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1160:
1162: pc_gamg->repart = n;
1163: return(0);
1164: }
1165: EXTERN_C_END
1169: /*@
1170: PCGAMGSetUseASMAggs - Use the aggregates defined by the coarsening to specify the subdomains of the GASM smoother
1172: Collective on PC
1174: Input Parameters:
1175: + pc - the preconditioner context
1176: - n - PETSC_TRUE to use the aggregates to define GASM subdomains
1178: Options Database Key:
1179: . -pc_gamg_use_agg_gasm
1181: Level: intermediate
1183: Concepts: Unstructured multigrid preconditioner
1185: .seealso: ()
1186: @*/
1187: PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
1188: {
1190:
1193: PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));
1194: return(0);
1195: }
1197: EXTERN_C_BEGIN
1200: PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
1201: {
1202: PC_MG *mg = (PC_MG*)pc->data;
1203: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1204:
1206: pc_gamg->use_aggs_in_gasm = n;
1207: return(0);
1208: }
1209: EXTERN_C_END
1213: /*@
1214: PCGAMGSetNlevels - Set the maximum number of multigrid levels
1216: Not collective on PC
1218: Input Parameters:
1219: + pc - the preconditioner context
1220: - n - the maximum number of multigrid levels
1222: Options Database Key:
1223: . -pc_mg_levels
1225: Level: intermediate
1227: Concepts: Unstructured multigrid preconditioner
1229: .seealso: ()
1230: @*/
1231: PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
1232: {
1234:
1237: PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));
1238: return(0);
1239: }
1241: EXTERN_C_BEGIN
1244: PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
1245: {
1246: PC_MG *mg = (PC_MG*)pc->data;
1247: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1248:
1250: pc_gamg->Nlevels = n;
1251: return(0);
1252: }
1253: EXTERN_C_END
1257: /*@
1258: PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph
1260: Not collective on PC
1262: Input Parameters:
1263: + pc - the preconditioner context
1264: - n - the relative drop tolerance; graph edges below this strength are dropped
1266: Options Database Key:
1267: . -pc_gamg_threshold
1269: Level: intermediate
1271: Concepts: Unstructured multigrid preconditioner
1273: .seealso: ()
1274: @*/
1275: PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
1276: {
1278:
1281: PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));
1282: return(0);
1283: }
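A minimal usage sketch ('pc' assumed; 0.05 is an illustrative value):

    PCGAMGSetThreshold( pc, 0.05 );   /* drop graph edges with relative strength below 0.05 */

or, equivalently: -pc_gamg_threshold 0.05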
1285: EXTERN_C_BEGIN
1288: PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
1289: {
1290: PC_MG *mg = (PC_MG*)pc->data;
1291: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1292:
1294: pc_gamg->threshold = n;
1295: return(0);
1296: }
1297: EXTERN_C_END
1301: /*@
1302: PCGAMGSetType - Set the GAMG solution method; calls the type's create method
1304: Collective on PC
1306: Input Parameters:
1307: + pc - the preconditioner context
1308: - type - the GAMG method type: GAMGAGG ("agg") or GAMGGEO ("geo")
1310: Options Database Key:
1311: . -pc_gamg_type
1313: Level: intermediate
1315: Concepts: Unstructured multigrid preconditioner
1317: .seealso: ()
1318: @*/
1319: PetscErrorCode PCGAMGSetType( PC pc, const PCGAMGType type )
1320: {
1322:
1325: PetscTryMethod(pc,"PCGAMGSetType_C",(PC,const PCGAMGType),(pc,type));
1326:
1327: return(0);
1328: }
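A minimal usage sketch ('pc' assumed):

    PCGAMGSetType( pc, GAMGAGG );   /* smoothed aggregation; GAMGGEO selects the geometric variant */

or, equivalently: -pc_gamg_type agg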
1329:
1330: EXTERN_C_BEGIN
1333: PetscErrorCode PCGAMGSetType_GAMG( PC pc, const PCGAMGType type )
1334: {
1335: PetscErrorCode ierr,(*r)(PC);
1336:
1338: PetscFListFind(GAMGList,((PetscObject)pc)->comm,type,PETSC_FALSE,(PetscVoidStarFunction)&r);
1339:
1341: if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
1343: /* call sub create method */
1344: (*r)(pc);
1346: return(0);
1347: }
1348: EXTERN_C_END
1352: PetscErrorCode PCSetFromOptions_GAMG( PC pc )
1353: {
1354: PetscErrorCode ierr;
1355: PC_MG *mg = (PC_MG*)pc->data;
1356: PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;
1357: PetscBool flag;
1358: MPI_Comm wcomm = ((PetscObject)pc)->comm;
1361: PetscOptionsHead("GAMG options");
1362: {
1363: /* -pc_gamg_verbose */
1364: PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
1365: "none", pc_gamg->verbose,
1366: &pc_gamg->verbose, PETSC_NULL );
1367:
1368:
1369: /* -pc_gamg_repartition */
1370: PetscOptionsBool("-pc_gamg_repartition",
1371: "Repartion coarse grids (false)",
1372: "PCGAMGRepartitioning",
1373: pc_gamg->repart,
1374: &pc_gamg->repart,
1375: &flag);
1376:
1377:
1378: /* -pc_gamg_use_agg_gasm */
1379: PetscOptionsBool("-pc_gamg_use_agg_gasm",
1380: "Use aggregation agragates for GASM smoother (false)",
1381: "PCGAMGUseASMAggs",
1382: pc_gamg->use_aggs_in_gasm,
1383: &pc_gamg->use_aggs_in_gasm,
1384: &flag);
1385:
1386:
1387: /* -pc_gamg_process_eq_limit */
1388: PetscOptionsInt("-pc_gamg_process_eq_limit",
1389: "Limit (goal) on number of equations per process on coarse grids",
1390: "PCGAMGSetProcEqLim",
1391: pc_gamg->min_eq_proc,
1392: &pc_gamg->min_eq_proc,
1393: &flag );
1394:
1395:
1396: /* -pc_gamg_coarse_eq_limit */
1397: PetscOptionsInt("-pc_gamg_coarse_eq_limit",
1398: "Limit on number of equations for the coarse grid",
1399: "PCGAMGSetCoarseEqLim",
1400: pc_gamg->coarse_eq_limit,
1401: &pc_gamg->coarse_eq_limit,
1402: &flag );
1403:
1405: /* -pc_gamg_threshold */
1406: PetscOptionsReal("-pc_gamg_threshold",
1407: "Relative threshold to use for dropping edges in aggregation graph",
1408: "PCGAMGSetThreshold",
1409: pc_gamg->threshold,
1410: &pc_gamg->threshold,
1411: &flag );
1412:
1413: if(flag && pc_gamg->verbose) PetscPrintf(wcomm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);
1415: PetscOptionsInt("-pc_mg_levels",
1416: "Set number of MG levels",
1417: "PCGAMGSetNlevels",
1418: pc_gamg->Nlevels,
1419: &pc_gamg->Nlevels,
1420: &flag );
1421: }
1422: PetscOptionsTail();
1424: return(0);
1425: }
1427: /* -------------------------------------------------------------------------- */
1428: /*MC
1429: PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
1430: - This is the entry point to GAMG, registered in pcregis.c
1432: Options Database Keys:
1433: Multigrid options (inherited)
1434: + -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
1435: . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
1436: . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
1437: - -pc_mg_type <multiplicative>: (one of) additive multiplicative full cascade kascade
1439: Level: intermediate
1441: Concepts: multigrid
1443: .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
1444: PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
1445: PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
1446: PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
1447: PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
1448: M*/
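A hypothetical command line combining the inherited MG options above with the GAMG-specific options registered in PCSetFromOptions_GAMG():

    ./myapp -ksp_type cg -pc_type gamg -pc_gamg_type agg \
            -pc_gamg_threshold 0.01 -pc_gamg_process_eq_limit 200 \
            -pc_gamg_coarse_eq_limit 800 -pc_gamg_repartition true -pc_mg_levels 5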
1449: EXTERN_C_BEGIN
1452: PetscErrorCode PCCreate_GAMG( PC pc )
1453: {
1454: PetscErrorCode ierr;
1455: PC_GAMG *pc_gamg;
1456: PC_MG *mg;
1457: #if defined PETSC_GAMG_USE_LOG
1458: static long count = 0;
1459: #endif
1463: /* PCGAMG is a subclass of PCMG; initialize pc as PCMG */
1464: PCSetType( pc, PCMG ); /* calls PCCreate_MG() and MGCreate_Private() */
1465: PetscObjectChangeTypeName( (PetscObject)pc, PCGAMG );
1467: /* create a supporting struct and attach it to pc */
1468: PetscNewLog( pc, PC_GAMG, &pc_gamg );
1469: mg = (PC_MG*)pc->data;
1470: mg->galerkin = 2; /* Use Galerkin, but it is computed externally */
1471: mg->innerctx = pc_gamg;
1473: pc_gamg->setup_count = 0;
1474: /* these should be in subctx but repartitioning needs simple arrays */
1475: pc_gamg->data_sz = 0;
1476: pc_gamg->data = 0;
1478: /* register AMG type */
1479: if( !GAMGList ){
1480: PetscFListAdd(&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void(*)(void))PCCreateGAMG_GEO);
1481: PetscFListAdd(&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void(*)(void))PCCreateGAMG_AGG);
1482: }
1484: /* overwrite the PCMG operation pointers with the derived-class PCGAMG functions */
1485: pc->ops->setfromoptions = PCSetFromOptions_GAMG;
1486: pc->ops->setup = PCSetUp_GAMG;
1487: pc->ops->reset = PCReset_GAMG;
1488: pc->ops->destroy = PCDestroy_GAMG;
1490: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1491: "PCGAMGSetProcEqLim_C",
1492: "PCGAMGSetProcEqLim_GAMG",
1493: PCGAMGSetProcEqLim_GAMG);
1494:
1496: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1497: "PCGAMGSetCoarseEqLim_C",
1498: "PCGAMGSetCoarseEqLim_GAMG",
1499: PCGAMGSetCoarseEqLim_GAMG);
1500:
1502: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1503: "PCGAMGSetRepartitioning_C",
1504: "PCGAMGSetRepartitioning_GAMG",
1505: PCGAMGSetRepartitioning_GAMG);
1506:
1508: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1509: "PCGAMGSetUseASMAggs_C",
1510: "PCGAMGSetUseASMAggs_GAMG",
1511: PCGAMGSetUseASMAggs_GAMG);
1512:
1514: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1515: "PCGAMGSetThreshold_C",
1516: "PCGAMGSetThreshold_GAMG",
1517: PCGAMGSetThreshold_GAMG);
1518:
1520: PetscObjectComposeFunctionDynamic( (PetscObject)pc,
1521: "PCGAMGSetType_C",
1522: "PCGAMGSetType_GAMG",
1523: PCGAMGSetType_GAMG);
1524:
1526: pc_gamg->repart = PETSC_FALSE;
1527: pc_gamg->use_aggs_in_gasm = PETSC_FALSE;
1528: pc_gamg->min_eq_proc = 100;
1529: pc_gamg->coarse_eq_limit = 800;
1530: pc_gamg->threshold = 0.001;
1531: pc_gamg->Nlevels = GAMG_MAXLEVELS;
1532: pc_gamg->verbose = 0;
1533: pc_gamg->emax_id = -1;
1535: /* private events */
1536: #if defined PETSC_GAMG_USE_LOG
1537: if( count++ == 0 ) {
1538: PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);
1539: PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);
1540: /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
1541: /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
1542: /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
1543: PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);
1544: PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);
1545: PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);
1546: PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);
1547: PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);
1548: PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);
1549: PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);
1550: PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);
1551: PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);
1552: PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);
1553: PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);
1554: PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);
1556: /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
1557: /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
1558: /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
1559: /* create timer stages */
1560: #if defined GAMG_STAGES
1561: {
1562: char str[32];
1563: sprintf(str,"MG Level %d (finest)",0);
1564: PetscLogStageRegister(str, &gamg_stages[0]);
1565: PetscInt lidx;
1566: for (lidx=1;lidx<9;lidx++){
1567: sprintf(str,"MG Level %d",lidx);
1568: PetscLogStageRegister(str, &gamg_stages[lidx]);
1569: }
1570: }
1571: #endif
1572: }
1573: #endif
1574: /* general events */
1575: #if defined PETSC_USE_LOG
1576: PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);
1577: PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);
1578: PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);
1579: PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);
1580: PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);
1581: PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);
1582: PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);
1583: PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG);
1584: #endif
1586: /* instantiate derived type */
1587: PetscOptionsHead("GAMG options");
1588: {
1589: char tname[256] = GAMGAGG;
1590: PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",
1591: GAMGList, tname, tname, sizeof(tname), PETSC_NULL );
1592:
1593: PCGAMGSetType( pc, tname );
1594: }
1595: PetscOptionsTail();
1597: return(0);
1598: }
1599: EXTERN_C_END