Actual source code: xxt.c

petsc-3.6.1 2015-08-06
/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt     *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};
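
/* Note (added commentary, not part of the original module): the XXT method
   (cf. Tufo and Fischer's coarse-grid solver) factors a symmetric positive
   definite A as A^{-1} = X.X^T, where the columns of X are built to be
   A-orthonormal (X^T.A.X = I).  xxt_generate() constructs X one column at a
   time in the separator-based firing order produced by det_separators(),
   and do_xxt_solve() then applies A^{-1} as two sparse sweeps with X. */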

static PetscInt n_xxt        =0;
static PetscInt n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xxt_generate(xxt_ADT xxt_handle);
static PetscInt do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data);

/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... potential problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL; xxt_handle->mvi  = NULL;

  return(xxt_handle);
}
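
/* Typical lifecycle, as a sketch (my_matvec and my_grid_data are
   illustrative names supplied by the caller):

     xxt_ADT handle = XXT_new();
     XXT_factor(handle, local2global, n, m, my_matvec, my_grid_data);
     XXT_solve(handle, x, b);    -- solves A.x = b via x = X.X^T.b
     XXT_free(handle);
*/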

/**************************************xxt.c***********************************/
PetscInt XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt  handle */
                    PetscInt *local2global, /* global column mapping       */
                    PetscInt n,             /* local num rows              */
                    PetscInt m,             /* local num cols              */
                    PetscErrorCode (*matvec)(void*,PetscScalar*,PetscScalar*), /* b_loc=A_local.x_loc         */
                    void *grid_data)        /* grid data for matvec        */
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns=0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}

/**************************************xxt.c***********************************/
PetscInt XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x,b,xxt_handle->mvi->n);
  do_xxt_solve(xxt_handle,x);

  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt       op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       vals[9],  work[9];
  PetscScalar    fvals[3], fwork[3];

  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi) {
    if (!PCTFS_my_id) { PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n"); }
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id) {
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_nnz=%D\n",PCTFS_my_id,vals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_nnz=%D\n",PCTFS_my_id,vals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_nnz=%D\n",PCTFS_my_id,vals[2]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(2d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.5)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(3d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.6667)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_n  =%D\n",PCTFS_my_id,vals[3]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_n  =%D\n",PCTFS_my_id,vals[4]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_n  =%D\n",PCTFS_my_id,vals[5]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_buf=%D\n",PCTFS_my_id,vals[6]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_buf=%D\n",PCTFS_my_id,vals[7]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_slv=%g\n",PCTFS_my_id,fvals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_slv=%g\n",PCTFS_my_id,fvals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);
  }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, the local portion of the global coarse matrix,
which is a row-distributed n x m matrix with n < m.
   o my_ml holds the address of the ML struct associated w/ A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
   PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
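
/* As a sketch (added commentary, not part of the module): a conforming
   mylocmatvec callback takes the caller's grid data and dense in/out
   vectors, e.g.

     PetscErrorCode my_matvec(void *data, PetscScalar *in, PetscScalar *out)
     {
       my_grid *g = (my_grid*)data;   -- hypothetical grid type
       ... compute out = A_local.in ...
       return 0;
     }

   XXT_factor() stores this pointer (cast to the mv_info-based signature)
   and do_matvec() below invokes it with grid_data as the first argument. */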
static PetscInt do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}

/**************************************xxt.c***********************************/
static PetscInt xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt       i,j,k,idex;
  PetscInt       dim, col;
  PetscScalar    *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt       *segs;
  PetscInt       op[] = {GL_ADD,0};
  PetscInt       off, len;
  PetscScalar    *x_ptr;
  PetscInt       *iptr, flag;
  PetscInt       start =0, end, work;
  PetscInt       op2[] = {GL_MIN,0};
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscInt       *nsep, *lnsep, *fo;
  PetscInt       a_n            =xxt_handle->mvi->n;
  PetscInt       a_m            =xxt_handle->mvi->m;
  PetscInt       *a_local2global=xxt_handle->mvi->local2global;
  PetscInt       level;
  PetscInt       xxt_nnz=0, xxt_max_nnz=0;
  PetscInt       n, m;
  PetscInt       *col_sz, *col_indices, *stages;
  PetscScalar    **col_vals, *x;
  PetscInt       n_global;
  PetscInt       xxt_zero_nnz  =0;
  PetscInt       xxt_zero_nnz_0=0;
  PetscBLASInt   i1            = 1,dlen;
  PetscScalar    dm1           = -1.0;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) j+=nsep[i];

  m = j-xxt_handle->ns;
  if (m!=j) {
    PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);
  }

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  col_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
    col_vals[i]   = NULL;
  }
  col_indices[j]=-1;
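
  /* Storage layout note (added commentary): column i of X is kept in
     compressed form -- col_indices[2*i] is the row offset of its first
     nonzero, col_indices[2*i+1] (== col_sz[i]) is the length of the
     nonzero run, and the run's values live contiguously in the x array
     (col_vals[i] points at them).  The index list is terminated by -1,
     which is what the solve loops below test for. */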

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) segs[i+1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors  */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) j+=nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5*PetscPowReal(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x           = (PetscScalar*) malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz     = 0;
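
  /* Sizing note (added commentary): the initial allocation is a heuristic --
     roughly n_global^(5/3) total fill (the same exponent reported as C(3d)
     in XXT_stats()) plus the separator-replication term j*n/2, split across
     processors.  If the factor outgrows it, the buffer is doubled below. */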

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0; i<m; i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end         +=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    col = (start<end) ? fo[start] : INT_MAX;
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      PetscInfo(0,"hey ... col==INT_MAX??\n");
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idex!=-1) {
        v[idex] = 1.0; j++;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    } else {
      idex=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex!=-1) v[idex] = 1.0;
    }
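
    /* At this point (added commentary) v holds the canonical basis vector
       e_col in local coordinates: firing column col means generating the
       X column that A-orthogonalizes e_col against everything fired so far. */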

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PCTFS_rvec_zero(uu,m);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++) {
      off   = *iptr++;
      len   = *iptr++;
      PetscBLASIntCast(len,&dlen);
      PetscStackCallBLAS("BLASdot",uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1));
      x_ptr+=len;
    }


    /* uu = X^T.u_l (comm portion) */
    PCTFS_ssgl_radd(uu, w, dim, stages);

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr = col_indices;
    for (k=0; k<i; k++) {
      off  = *iptr++;
      len  = *iptr++;
      PetscBLASIntCast(len,&dlen);
      PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1));
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    PetscBLASIntCast(n,&dlen);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1));

    /* compute u_l = A.v_l */
    if (a_n!=a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);
    PCTFS_rvec_zero(u,n);
    do_matvec(xxt_handle->mvi,v,u);

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    PetscBLASIntCast(n,&dlen);
    PetscStackCallBLAS("BLASdot",alpha = BLASdot_(&dlen,u,&i1,v,&i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);
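
    /* Normalization note (added commentary): dividing v by sqrt(v^T.A.v)
       below makes the new column satisfy v^T.A.v = 1; together with the
       orthogonalization above this keeps X^T.A.X = I column by column,
       which is exactly what A^{-1} = X.X^T requires. */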

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (PetscAbsScalar(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off=len=0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);
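
    /* Added commentary: after the scan, off is the index of the first
       nonzero of v and len now spans first..last nonzero inclusive, so
       interior zeros are stored explicitly (they are tallied below as
       xxt_zero_nnz / xxt_zero_nnz_0). */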

    if (len>0) {
      if ((xxt_nnz+len)>xxt_max_nnz) {
        PetscInfo(0,"increasing space for X by 2x!\n");
        xxt_max_nnz *= 2;
        x_ptr        = (PetscScalar*) malloc(xxt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xxt_nnz);
        free(x);
        x     = x_ptr;
        x_ptr+=xxt_nnz;
      }
      xxt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xxt_zero_nnz_0++;
        }
      }
      col_indices[2*i] = off;
      col_sz[i] = col_indices[2*i+1] = len;
      col_vals[i] = x_ptr;
    } else {
      col_indices[2*i] = 0;
      col_sz[i]        = col_indices[2*i+1] = 0;
      col_vals[i]      = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++] = i;
    PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);
  }
  stages[dim]=i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level]-stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt       off, len, *iptr;
  PetscInt       level        = xxt_handle->level;
  PetscInt       n            = xxt_handle->info->n;
  PetscInt       m            = xxt_handle->info->m;
  PetscInt       *stages      = xxt_handle->info->stages;
  PetscInt       *col_indices = xxt_handle->info->col_indices;
  PetscScalar    *x_ptr, *uu_ptr;
  PetscScalar    *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar    *solve_w  = xxt_handle->info->solve_w;
  PetscScalar    *x        = xxt_handle->info->x;
  PetscBLASInt   i1        = 1,dlen;
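
  /* Overview (added commentary): this applies uc <- X.(X^T.uc), i.e. A^{-1}
     applied in place.  First sweep: dot each stored column against uc to get
     uu = X^T.uc; a segmented global add (PCTFS_ssgl_radd) then combines the
     pieces across the hypercube; second sweep: accumulate uc = X.uu column
     by column. */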

  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* overall: uc = X.X^T.uc */
  /* first sweep: uu = X^T.uc (local portion) */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off       =*iptr++;
    len       =*iptr++;
    PetscBLASIntCast(len,&dlen);
    PetscStackCallBLAS("BLASdot",*uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1));
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);

  PCTFS_rvec_zero(uc,n);

  /* second sweep: uc = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len) {
    off  =*iptr++;
    len  =*iptr++;
    PetscBLASIntCast(len,&dlen);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1));
  }
  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  if (!xxt_handle) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL\n");

  vals[0]=vals[1]=xxt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1],xxt_handle->id);
  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt     *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD,0};
  PetscScalar  *lhs, *rhs;
  PetscInt     *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt     *local2global   = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;
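
  /* Overview (added commentary): working top down over the log2(P) hypercube
     dimensions, each pass splits the current sub-hypercube into a left and a
     right half, fires a gather-scatter from each half, and marks the dofs
     that see signal from the other side -- those form the separator for this
     level.  Marked dofs are pushed onto fo[] from the back, so fo ends up
     holding the global firing order that xxt_generate() consumes. */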

  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir,level+1);
  PCTFS_ivec_zero(nsep,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the number of unique dofs: after the gop, lhs[i] holds the
     multiplicity of dof i across processors, so sum(1/lhs[i]) counts each
     global dof exactly once */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  PCTFS_rvec_zero(rsum,2);
  for (ct=i=0; i<n; i++) {
    if (lhs[i]!=0.0) {
      rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];
    }
  }
  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;   /* round-off guard before the integer casts below */
  rsum[1]+=0.1;

  if (PetscAbsScalar(rsum[0]-rsum[1])>EPS) shared=1;

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared) {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {

      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      for (i=0;i<n;i++) {
        if (id< mask) {
          if (lhs[i]!=0.0) lhs[i]=1.0;
        }

        if (id>=mask) {
          if (rhs[i]!=0.0) rhs[i]=1.0;
        }
      }

      if (id< mask) PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1);
      else          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1);

      /* count number of dofs I own that have signal and not in sep set */
      PCTFS_rvec_zero(rsum,4);
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id< mask) {
            if (lhs[i]!=0.0) { sum[0]++; rsum[0]+=1.0/lhs[i]; }
          }
          /* number of dofs to be marked on rhs hc */
          if (id>=mask) {
            if (rhs[i]!=0.0) { sum[1]++; rsum[1]+=1.0/rhs[i]; }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);
      PCTFS_grop_hc(rsum,rw,4,op,edge);
      rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge]=ct;
        nsep[edge]=(PetscInt) rsum[0];
        dir [edge]=LEFT;
      }

      if (id>=mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct=i=0;i<n;i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;

            if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct>1) PCTFS_ivec_sort(iptr,ct);

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt) rsum[1];
        dir [edge]  = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  } else {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      /* lhs hc wins */
      if (sum[2]>=sum[3]) {
        if (id<mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0;i<n;i++) {
            if ((!used[i])&&(lhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[0];
        dir [edge]=LEFT;
      } else { /* rhs hc wins */
        if (id>=mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct=i=0;i<n;i++) {
            if ((!used[i])&&(rhs[i]!=0.0)) {
              ct++; nfo++;
              *--iptr = local2global[i];
              used[i]=edge;
            }
          }
          if (ct>1) PCTFS_ivec_sort(iptr,ct);
          lnsep[edge]=ct;
        }
        nsep[edge]=sum[1];
        dir [edge]=RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  }


  /* level 0 is the on-processor case, so mark the remainder */
  for (ct=i=0; i<n; i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct>1) PCTFS_ivec_sort(iptr,ct);
  lnsep[edge]=ct;
  nsep[edge] =ct;
  dir[edge]  =LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  return(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global,PetscInt n,PetscInt m,PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*),void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info*)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt*)malloc((m+1)*sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
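  /* Added commentary: the stored callback was registered through XXT_factor()
     with a (void*,...) signature and cast to take mv_info*; the cast below
     simply forwards the caller's grid_data as that first argument, so the
     callback receives exactly the pointer it was registered with. */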
  A->matvec((mv_info*)A->grid_data,v,u);
  return(0);
}