Actual source code: util.c
1: /*
2: GAMG geometric-algebraic multigrid PC - Mark Adams 2011
3: */
4: #include <petsc/private/matimpl.h>
5: #include <../src/ksp/pc/impls/gamg/gamg.h>
6: #include <petsc/private/kspimpl.h>
8: /*
9: Produces a set of block column indices of the matrix row, one for each block represented in the original row
11: n - the number of block indices in cc[]
12: cc - the block indices (must be large enough to contain the indices)
13: */
14: static inline PetscErrorCode MatCollapseRow(Mat Amat,PetscInt row,PetscInt bs,PetscInt *n,PetscInt *cc)
15: {
16: PetscInt cnt = -1,nidx,j;
17: const PetscInt *idx;
19: MatGetRow(Amat,row,&nidx,&idx,NULL);
20: if (nidx) {
21: cnt = 0;
22: cc[cnt] = idx[0]/bs;
23: for (j=1; j<nidx; j++) {
24: if (cc[cnt] < idx[j]/bs) cc[++cnt] = idx[j]/bs;
25: }
26: }
27: MatRestoreRow(Amat,row,&nidx,&idx,NULL);
28: *n = cnt+1;
29: return 0;
30: }
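/* Example (illustrative): with bs=3, a row with scalar column indices {0,1,2,6,7,8} collapses to
   the block column indices cc[] = {0,2} and *n = 2. The scan above relies on MatGetRow() returning
   column indices in increasing order. */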
32: /*
33: Produces a set of block column indices of the matrix block row, one for each block represented in the original set of rows
35: ncollapsed - the number of block indices
36: collapsed - the block indices (must be large enough to contain the indices)
37: */
38: static inline PetscErrorCode MatCollapseRows(Mat Amat,PetscInt start,PetscInt bs,PetscInt *w0,PetscInt *w1,PetscInt *w2,PetscInt *ncollapsed,PetscInt **collapsed)
39: {
40: PetscInt i,nprev,*cprev = w0,ncur = 0,*ccur = w1,*merged = w2,*cprevtmp;
42: MatCollapseRow(Amat,start,bs,&nprev,cprev);
43: for (i=start+1; i<start+bs; i++) {
44: MatCollapseRow(Amat,i,bs,&ncur,ccur);
45: PetscMergeIntArray(nprev,cprev,ncur,ccur,&nprev,&merged);
46: cprevtmp = cprev; cprev = merged; merged = cprevtmp;
47: }
48: *ncollapsed = nprev;
49: if (collapsed) *collapsed = cprev;
50: return 0;
51: }
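/* Example (illustrative): with bs=2 and start=2, scalar rows 2 and 3 with columns {0,1,6,7} and
   {0,1,4,5} respectively collapse to the merged block column indices {0,2,3}. */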
53: /* -------------------------------------------------------------------------- */
54: /*
55: PCGAMGCreateGraph - create simple scaled scalar graph from matrix
57: Input Parameter:
58: . Amat - matrix
59: Output Parameter:
60: . a_Gmat - output scalar graph
61: */
62: PetscErrorCode PCGAMGCreateGraph(Mat Amat, Mat *a_Gmat)
63: {
64: PetscInt Istart,Iend,Ii,jj,kk,ncols,nloc,NN,MM,bs;
65: MPI_Comm comm;
66: Mat Gmat;
67: PetscBool ismpiaij,isseqaij;
69: PetscObjectGetComm((PetscObject)Amat,&comm);
70: MatGetOwnershipRange(Amat, &Istart, &Iend);
71: MatGetSize(Amat, &MM, &NN);
72: MatGetBlockSize(Amat, &bs);
73: nloc = (Iend-Istart)/bs;
75: PetscObjectBaseTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);
76: PetscObjectBaseTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);
78: PetscLogEventBegin(petsc_gamg_setup_events[GRAPH],0,0,0,0);
80: /* TODO GPU: these calls are potentially expensive if matrices are large and we want to use the GPU */
81: /* A solution would be to provide a new API, MatAIJGetCollapsedAIJ, for which each class can provide a fast
82: implementation */
83: MatViewFromOptions(Amat, NULL, "-g_mat_view");
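/* Fast path: if every nonzero bs x bs block of Amat is fully dense (verified row by row below),
   each block can be collapsed directly into one scalar graph entry; otherwise we fall back to the
   general path at the old_bs label further down. */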
84: if (bs > 1 && (isseqaij || ((Mat_MPIAIJ*)Amat->data)->garray)) {
85: PetscInt *d_nnz, *o_nnz;
86: Mat a, b, c;
87: MatScalar *aa,val,AA[4096];
88: PetscInt *aj,*ai,AJ[4096],nc;
89: PetscInfo(Amat,"New bs>1 PCGAMGCreateGraph. nloc=%D\n",nloc);
90: if (isseqaij) {
91: a = Amat; b = NULL;
92: }
93: else {
94: Mat_MPIAIJ *d = (Mat_MPIAIJ*)Amat->data;
95: a = d->A; b = d->B;
96: }
97: PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);
98: for (c=a, kk=0 ; c && kk<2 ; c=b, kk++){
99: PetscInt *nnz = (c==a) ? d_nnz : o_nnz, nmax=0;
100: const PetscInt *cols;
101: for (PetscInt brow=0,jj,ok=1,j0; brow < nloc*bs; brow += bs) { // block rows
102: MatGetRow(c,brow,&jj,&cols,NULL);
103: nnz[brow/bs] = jj/bs;
104: if (jj%bs) ok = 0;
105: if (cols) j0 = cols[0];
106: else j0 = -1;
107: MatRestoreRow(c,brow,&jj,&cols,NULL);
108: if (nnz[brow/bs]>nmax) nmax = nnz[brow/bs];
109: for (PetscInt ii=1; ii < bs && nnz[brow/bs] ; ii++) { // check for non-dense blocks
110: MatGetRow(c,brow+ii,&jj,&cols,NULL);
111: if (jj%bs) ok = 0;
112: if ((cols && j0 != cols[0]) || (!cols && j0 != -1)) ok = 0;
113: if (nnz[brow/bs] != jj/bs) ok = 0;
114: MatRestoreRow(c,brow+ii,&jj,&cols,NULL);
115: }
116: if (!ok) {
117: PetscFree2(d_nnz,o_nnz);
118: goto old_bs;
119: }
120: }
122: }
123: MatCreate(comm, &Gmat);
124: MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);
125: MatSetBlockSizes(Gmat, 1, 1);
126: MatSetType(Gmat, MATAIJ);
127: MatSeqAIJSetPreallocation(Gmat,0,d_nnz);
128: MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);
129: PetscFree2(d_nnz,o_nnz);
130: // diag
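/* Each dense bs x bs block of the local (diagonal) part is reduced to a single scalar: the sum of
   the absolute values of its entries, used as a cheap block norm. */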
131: for (PetscInt brow=0,n,grow; brow < nloc*bs; brow += bs) { // block rows
132: Mat_SeqAIJ *aseq = (Mat_SeqAIJ*)a->data;
133: ai = aseq->i;
134: n = ai[brow+1] - ai[brow];
135: aj = aseq->j + ai[brow];
136: for (int k=0; k<n; k += bs) { // block columns
137: AJ[k/bs] = aj[k]/bs + Istart/bs; // diag starts at (Istart,Istart)
138: val = 0;
139: for (int ii=0; ii<bs; ii++) { // rows in block
140: aa = aseq->a + ai[brow+ii] + k;
141: for (int jj=0; jj<bs; jj++) { // columns in block
142: val += PetscAbs(PetscRealPart(aa[jj])); // a sort of norm
143: }
144: }
145: AA[k/bs] = val;
146: }
147: grow = Istart/bs + brow/bs;
148: MatSetValues(Gmat,1,&grow,n/bs,AJ,AA,INSERT_VALUES);
149: }
150: // off-diag
151: if (ismpiaij) {
152: Mat_MPIAIJ *aij = (Mat_MPIAIJ*)Amat->data;
153: const PetscScalar *vals;
154: const PetscInt *cols, *garray = aij->garray;
156: for (PetscInt brow=0,grow; brow < nloc*bs; brow += bs) { // block rows
157: MatGetRow(b,brow,&ncols,&cols,NULL);
158: for (int k=0,cidx=0; k<ncols; k += bs,cidx++) {
159: AA[k/bs] = 0;
160: AJ[cidx] = garray[cols[k]]/bs;
161: }
162: nc = ncols/bs;
163: MatRestoreRow(b,brow,&ncols,&cols,NULL);
164: for (int ii=0; ii<bs; ii++) { // rows in block
165: MatGetRow(b,brow+ii,&ncols,&cols,&vals);
166: for (int k=0; k<ncols; k += bs) {
167: for (int jj=0; jj<bs; jj++) { // cols in block
168: AA[k/bs] += PetscAbs(PetscRealPart(vals[k+jj]));
169: }
170: }
171: MatRestoreRow(b,brow+ii,&ncols,&cols,&vals);
172: }
173: grow = Istart/bs + brow/bs;
174: MatSetValues(Gmat,1,&grow,nc,AJ,AA,INSERT_VALUES);
175: }
176: }
177: MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);
178: MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);
179: MatViewFromOptions(Gmat, NULL, "-g_mat_view");
180: } else if (bs > 1) {
181: const PetscScalar *vals;
182: const PetscInt *idx;
183: PetscInt *d_nnz, *o_nnz,*w0,*w1,*w2;
185: old_bs:
186: /*
187: Determine the preallocation needed for the scalar matrix derived from the vector matrix.
188: */
190: PetscInfo(Amat,"OLD bs>1 PCGAMGCreateGraph\n");
191: PetscObjectBaseTypeCompare((PetscObject)Amat,MATSEQAIJ,&isseqaij);
192: PetscObjectBaseTypeCompare((PetscObject)Amat,MATMPIAIJ,&ismpiaij);
193: PetscMalloc2(nloc, &d_nnz,isseqaij ? 0 : nloc, &o_nnz);
195: if (isseqaij) {
196: PetscInt max_d_nnz;
198: /*
199: Determine exact preallocation count for (sequential) scalar matrix
200: */
201: MatSeqAIJGetMaxRowNonzeros(Amat,&max_d_nnz);
202: max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
203: PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);
204: for (Ii = 0, jj = 0; Ii < Iend; Ii += bs, jj++) {
205: MatCollapseRows(Amat,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);
206: }
207: PetscFree3(w0,w1,w2);
209: } else if (ismpiaij) {
210: Mat Daij,Oaij;
211: const PetscInt *garray;
212: PetscInt max_d_nnz;
214: MatMPIAIJGetSeqAIJ(Amat,&Daij,&Oaij,&garray);
216: /*
217: Determine exact preallocation count for diagonal block portion of scalar matrix
218: */
219: MatSeqAIJGetMaxRowNonzeros(Daij,&max_d_nnz);
220: max_d_nnz = PetscMin(nloc,bs*max_d_nnz);
221: PetscMalloc3(max_d_nnz, &w0,max_d_nnz, &w1,max_d_nnz, &w2);
222: for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
223: MatCollapseRows(Daij,Ii,bs,w0,w1,w2,&d_nnz[jj],NULL);
224: }
225: PetscFree3(w0,w1,w2);
227: /*
228: Overestimate (usually grossly) the preallocation count for the off-diagonal portion of the scalar matrix
229: */
230: for (Ii = 0, jj = 0; Ii < Iend - Istart; Ii += bs, jj++) {
231: o_nnz[jj] = 0;
232: for (kk=0; kk<bs; kk++) { /* rows that get collapsed to a single row */
233: MatGetRow(Oaij,Ii+kk,&ncols,NULL,NULL);
234: o_nnz[jj] += ncols;
235: MatRestoreRow(Oaij,Ii+kk,&ncols,NULL,NULL);
236: }
237: if (o_nnz[jj] > (NN/bs-nloc)) o_nnz[jj] = NN/bs-nloc;
238: }
240: } else SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Require AIJ matrix type");
242: /* get scalar copy (norms) of matrix */
243: MatCreate(comm, &Gmat);
244: MatSetSizes(Gmat,nloc,nloc,PETSC_DETERMINE,PETSC_DETERMINE);
245: MatSetBlockSizes(Gmat, 1, 1);
246: MatSetType(Gmat, MATAIJ);
247: MatSeqAIJSetPreallocation(Gmat,0,d_nnz);
248: MatMPIAIJSetPreallocation(Gmat,0,d_nnz,0,o_nnz);
249: PetscFree2(d_nnz,o_nnz);
251: for (Ii = Istart; Ii < Iend; Ii++) {
252: PetscInt dest_row = Ii/bs;
253: MatGetRow(Amat,Ii,&ncols,&idx,&vals);
254: for (jj=0; jj<ncols; jj++) {
255: PetscInt dest_col = idx[jj]/bs;
256: PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
257: MatSetValues(Gmat,1,&dest_row,1,&dest_col,&sv,ADD_VALUES);
258: }
259: MatRestoreRow(Amat,Ii,&ncols,&idx,&vals);
260: }
261: MatAssemblyBegin(Gmat,MAT_FINAL_ASSEMBLY);
262: MatAssemblyEnd(Gmat,MAT_FINAL_ASSEMBLY);
263: MatViewFromOptions(Gmat, NULL, "-g_mat_view");
264: } else {
265: /* just copy scalar matrix - abs() not taken here but scaled later */
266: MatDuplicate(Amat, MAT_COPY_VALUES, &Gmat);
267: }
268: MatPropagateSymmetryOptions(Amat, Gmat);
270: PetscLogEventEnd(petsc_gamg_setup_events[GRAPH],0,0,0,0);
272: *a_Gmat = Gmat;
273: return 0;
274: }
276: /* -------------------------------------------------------------------------- */
277: /*@C
278: PCGAMGFilterGraph - filter the graph (remove zero and, if the threshold is positive, small values) and make it symmetric if requested
280: Collective on Mat
282: Input Parameters:
283: + a_Gmat - the graph
284: . vfilter - threshold parameter [0,1)
285: - symm - make the result symmetric
287: Level: developer
289: Notes:
290: This is called before the graph coarseners are called.
292: .seealso: PCGAMGSetThreshold()
293: @*/
294: PetscErrorCode PCGAMGFilterGraph(Mat *a_Gmat,PetscReal vfilter,PetscBool symm)
295: {
296: PetscInt Istart,Iend,Ii,jj,ncols,nnz0,nnz1, NN, MM, nloc;
297: PetscMPIInt rank;
298: Mat Gmat = *a_Gmat, tGmat;
299: MPI_Comm comm;
300: const PetscScalar *vals;
301: const PetscInt *idx;
302: PetscInt *d_nnz, *o_nnz;
303: Vec diag;
305: PetscLogEventBegin(petsc_gamg_setup_events[SET16],0,0,0,0);
307: /* TODO GPU: optimization proposal, each class provides fast implementation of this
308: procedure via MatAbs API */
309: if (vfilter < 0.0 && !symm) {
310: /* Just use the provided matrix as the graph but make all values positive */
311: MatInfo info;
312: PetscScalar *avals;
313: PetscBool isaij,ismpiaij;
314: PetscObjectBaseTypeCompare((PetscObject)Gmat,MATSEQAIJ,&isaij);
315: PetscObjectBaseTypeCompare((PetscObject)Gmat,MATMPIAIJ,&ismpiaij);
317: if (isaij) {
318: MatGetInfo(Gmat,MAT_LOCAL,&info);
319: MatSeqAIJGetArray(Gmat,&avals);
320: for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
321: MatSeqAIJRestoreArray(Gmat,&avals);
322: } else {
323: Mat_MPIAIJ *aij = (Mat_MPIAIJ*)Gmat->data;
324: MatGetInfo(aij->A,MAT_LOCAL,&info);
325: MatSeqAIJGetArray(aij->A,&avals);
326: for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
327: MatSeqAIJRestoreArray(aij->A,&avals);
328: MatGetInfo(aij->B,MAT_LOCAL,&info);
329: MatSeqAIJGetArray(aij->B,&avals);
330: for (jj = 0; jj<info.nz_used; jj++) avals[jj] = PetscAbsScalar(avals[jj]);
331: MatSeqAIJRestoreArray(aij->B,&avals);
332: }
333: PetscLogEventEnd(petsc_gamg_setup_events[SET16],0,0,0,0);
334: return 0;
335: }
337: /* TODO GPU: this can be called when filter = 0 -> Probably provide MatAIJThresholdCompress that compresses the entries below a threshold?
338: Also, if the matrix is symmetric, can we skip this
339: operation? It can be very expensive on large matrices. */
340: PetscObjectGetComm((PetscObject)Gmat,&comm);
341: MPI_Comm_rank(comm,&rank);
342: MatGetOwnershipRange(Gmat, &Istart, &Iend);
343: nloc = Iend - Istart;
344: MatGetSize(Gmat, &MM, &NN);
346: if (symm) {
347: Mat matTrans;
348: MatTranspose(Gmat, MAT_INITIAL_MATRIX, &matTrans);
349: MatAXPY(Gmat, 1.0, matTrans, Gmat->structurally_symmetric ? SAME_NONZERO_PATTERN : DIFFERENT_NONZERO_PATTERN);
350: MatDestroy(&matTrans);
351: }
353: /* symmetrically scale Gmat by its diagonal so all values are roughly between -1 and 1 */
354: MatCreateVecs(Gmat, &diag, NULL);
355: MatGetDiagonal(Gmat, diag);
356: VecReciprocal(diag);
357: VecSqrtAbs(diag);
358: MatDiagonalScale(Gmat, diag, diag);
359: VecDestroy(&diag);
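/* With this scaling each entry g_ij has been replaced by g_ij/sqrt(|g_ii| |g_jj|), so the threshold
   vfilter below is applied to coupling strengths measured relative to the diagonal. */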
361: /* Determine upper bound on nonzeros needed in new filtered matrix */
362: PetscMalloc2(nloc, &d_nnz,nloc, &o_nnz);
363: for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
364: MatGetRow(Gmat,Ii,&ncols,NULL,NULL);
365: d_nnz[jj] = ncols;
366: o_nnz[jj] = ncols;
367: MatRestoreRow(Gmat,Ii,&ncols,NULL,NULL);
368: if (d_nnz[jj] > nloc) d_nnz[jj] = nloc;
369: if (o_nnz[jj] > (MM-nloc)) o_nnz[jj] = MM - nloc;
370: }
371: MatCreate(comm, &tGmat);
372: MatSetSizes(tGmat,nloc,nloc,MM,MM);
373: MatSetBlockSizes(tGmat, 1, 1);
374: MatSetType(tGmat, MATAIJ);
375: MatSeqAIJSetPreallocation(tGmat,0,d_nnz);
376: MatMPIAIJSetPreallocation(tGmat,0,d_nnz,0,o_nnz);
377: MatSetOption(tGmat,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE);
378: PetscFree2(d_nnz,o_nnz);
380: for (Ii = Istart, nnz0 = nnz1 = 0; Ii < Iend; Ii++) {
381: MatGetRow(Gmat,Ii,&ncols,&idx,&vals);
382: for (jj=0; jj<ncols; jj++,nnz0++) {
383: PetscScalar sv = PetscAbs(PetscRealPart(vals[jj]));
384: if (PetscRealPart(sv) > vfilter) {
385: nnz1++;
386: MatSetValues(tGmat,1,&Ii,1,&idx[jj],&sv,INSERT_VALUES);
387: }
388: }
389: MatRestoreRow(Gmat,Ii,&ncols,&idx,&vals);
390: }
391: MatAssemblyBegin(tGmat,MAT_FINAL_ASSEMBLY);
392: MatAssemblyEnd(tGmat,MAT_FINAL_ASSEMBLY);
393: if (symm) {
394: MatSetOption(tGmat,MAT_SYMMETRIC,PETSC_TRUE);
395: } else {
396: MatPropagateSymmetryOptions(Gmat,tGmat);
397: }
398: PetscLogEventEnd(petsc_gamg_setup_events[SET16],0,0,0,0);
400: #if defined(PETSC_USE_INFO)
401: {
402: double t1 = (!nnz0) ? 1. : 100.*(double)nnz1/(double)nnz0, t2 = (!nloc) ? 1. : (double)nnz0/(double)nloc;
403: PetscInfo(*a_Gmat,"\t %g%% nnz after filtering, with threshold %g, %g nnz ave. (N=%D)\n",t1,vfilter,t2,MM);
404: }
405: #endif
406: MatDestroy(&Gmat);
407: *a_Gmat = tGmat;
408: return 0;
409: }
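/* Usage sketch (illustrative only; Amat and vfilter stand for caller-provided data): the two
   routines above are typically chained when building the coarsening graph, e.g. */
#if 0
  Mat Gmat;
  PCGAMGCreateGraph(Amat,&Gmat);                 /* collapse blocks into a scalar graph      */
  PCGAMGFilterGraph(&Gmat,vfilter,PETSC_FALSE);  /* drop weak edges, optionally symmetrize   */
  /* ... pass Gmat to the coarsener ... */
  MatDestroy(&Gmat);
#endif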
411: /* -------------------------------------------------------------------------- */
412: /*
413: PCGAMGGetDataWithGhosts - hacks into the MPIAIJ Mat internals, so this must be run with a communicator of size > 1
415: Input Parameters:
416: . Gmat - MPIAIJ matrix used for the scatters
417: . data_sz - number of data terms per node (# cols in output)
418: . data_in[nloc*data_sz] - column-oriented data
419: Output Parameters:
420: . a_stride - number of rows of output
421: . a_data_out[stride*data_sz] - output data with ghosts
422: */
423: PetscErrorCode PCGAMGGetDataWithGhosts(Mat Gmat,PetscInt data_sz,PetscReal data_in[],PetscInt *a_stride,PetscReal **a_data_out)
424: {
425: Vec tmp_crds;
426: Mat_MPIAIJ *mpimat = (Mat_MPIAIJ*)Gmat->data;
427: PetscInt nnodes,num_ghosts,dir,kk,jj,my0,Iend,nloc;
428: PetscScalar *data_arr;
429: PetscReal *datas;
430: PetscBool isMPIAIJ;
432: PetscObjectBaseTypeCompare((PetscObject)Gmat, MATMPIAIJ, &isMPIAIJ);
433: MatGetOwnershipRange(Gmat, &my0, &Iend);
434: nloc = Iend - my0;
435: VecGetLocalSize(mpimat->lvec, &num_ghosts);
436: nnodes = num_ghosts + nloc;
437: *a_stride = nnodes;
438: MatCreateVecs(Gmat, &tmp_crds, NULL);
440: PetscMalloc1(data_sz*nnodes, &datas);
441: for (dir=0; dir<data_sz; dir++) {
442: /* set local, and global */
443: for (kk=0; kk<nloc; kk++) {
444: PetscInt gid = my0 + kk;
445: PetscScalar crd = (PetscScalar)data_in[dir*nloc + kk]; /* col oriented */
446: datas[dir*nnodes + kk] = PetscRealPart(crd);
448: VecSetValues(tmp_crds, 1, &gid, &crd, INSERT_VALUES);
449: }
450: VecAssemblyBegin(tmp_crds);
451: VecAssemblyEnd(tmp_crds);
452: /* get ghost data */
453: VecScatterBegin(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);
454: VecScatterEnd(mpimat->Mvctx,tmp_crds,mpimat->lvec,INSERT_VALUES,SCATTER_FORWARD);
455: VecGetArray(mpimat->lvec, &data_arr);
456: for (kk=nloc,jj=0;jj<num_ghosts;kk++,jj++) datas[dir*nnodes + kk] = PetscRealPart(data_arr[jj]);
457: VecRestoreArray(mpimat->lvec, &data_arr);
458: }
459: VecDestroy(&tmp_crds);
460: *a_data_out = datas;
461: return 0;
462: }
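/* Usage sketch (illustrative; ndim and coords stand for caller-provided data): gather
   column-oriented nodal data, including ghost entries, e.g. coordinates used by aggregation. */
#if 0
  PetscReal *ghost_data;
  PetscInt   stride;
  PCGAMGGetDataWithGhosts(Gmat,ndim,coords,&stride,&ghost_data);
  /* the value for direction dir at (local or ghost) node kk is ghost_data[dir*stride + kk] */
  PetscFree(ghost_data);
#endif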
464: PetscErrorCode PCGAMGHashTableCreate(PetscInt a_size, PCGAMGHashTable *a_tab)
465: {
466: PetscInt kk;
468: a_tab->size = a_size;
469: PetscMalloc2(a_size, &a_tab->table,a_size, &a_tab->data);
470: for (kk=0; kk<a_size; kk++) a_tab->table[kk] = -1;
471: return 0;
472: }
474: PetscErrorCode PCGAMGHashTableDestroy(PCGAMGHashTable *a_tab)
475: {
476: PetscFree2(a_tab->table,a_tab->data);
477: return 0;
478: }
480: PetscErrorCode PCGAMGHashTableAdd(PCGAMGHashTable *a_tab, PetscInt a_key, PetscInt a_data)
481: {
482: PetscInt kk,idx;
485: for (kk = 0, idx = GAMG_HASH(a_key); kk < a_tab->size; kk++, idx = (idx==(a_tab->size-1)) ? 0 : idx + 1) {
486: if (a_tab->table[idx] == a_key) {
487: /* exists */
488: a_tab->data[idx] = a_data;
489: break;
490: } else if (a_tab->table[idx] == -1) {
491: /* add */
492: a_tab->table[idx] = a_key;
493: a_tab->data[idx] = a_data;
494: break;
495: }
496: }
497: if (kk==a_tab->size) {
498: /* this is not too efficient -- we wait until the table is completely full before resizing */
499: PetscInt oldsize = a_tab->size, new_size = 2*a_tab->size + 5, *oldtable = a_tab->table, *olddata = a_tab->data;
501: a_tab->size = new_size;
502: PetscMalloc2(a_tab->size, &a_tab->table,a_tab->size, &a_tab->data);
503: for (kk=0;kk<a_tab->size;kk++) a_tab->table[kk] = -1;
504: for (kk=0;kk<oldsize;kk++) {
505: if (oldtable[kk] != -1) {
506: PCGAMGHashTableAdd(a_tab, oldtable[kk], olddata[kk]);
507: }
508: }
509: PetscFree2(oldtable,olddata);
510: PCGAMGHashTableAdd(a_tab, a_key, a_data);
511: }
512: return 0;
513: }
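/* Usage sketch (illustrative): a small open-addressing table with linear probing; -1 marks an
   empty slot, so keys must be non-negative (e.g. global indices). */
#if 0
  PCGAMGHashTable tab;
  PCGAMGHashTableCreate(1024,&tab);
  PCGAMGHashTableAdd(&tab,2011,7);  /* key 2011 -> data 7; the table grows itself when full */
  PCGAMGHashTableDestroy(&tab);
#endif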