Actual source code: xxt.c

petsc-3.3-p7 2013-05-11
/*************************************xxt.c************************************
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xxt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0

typedef struct xxt_solver_info {
  PetscInt n, m, n_global, m_global;
  PetscInt nnz, max_nnz, msg_buf_sz;
  PetscInt *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt nsolves;
  PetscScalar tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt n, m, n_global, m_global;
  PetscInt *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void *grid_data;
} mv_info;

struct xxt_CDT{
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xxt_info *info;
  mv_info  *mvi;
};
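/* NOTE: the XXT method (Tufo/Fischer) builds a sparse factor X that is
   A-orthonormal, X^T.A.X = I, so that A^{-1} = X.X^T.  xxt_generate() below
   constructs X one column at a time by Gram-Schmidt in the A-inner product;
   do_xxt_solve() then applies uc = X.(X^T.uc), with one staged global sum
   between the two sparse applications. */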

static PetscInt n_xxt=0;
static PetscInt n_xxt_handles=0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xxt_generate(xxt_ADT xxt_handle);
static PetscInt do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data);

/**************************************xxt.c***********************************/
xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... pot. problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL; xxt_handle->mvi  = NULL;

  return(xxt_handle);
}
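/* Typical call sequence (schematic sketch only; my_matvec and my_data are
   placeholder names and error checking is omitted):

     xxt_ADT h = XXT_new();
     XXT_factor(h, local2global, n, m, (void*)my_matvec, (void*)my_data);
     XXT_solve(h, x, b);                  -- x <- A^{-1} b
     XXT_stats(h);
     XXT_free(h);
*/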

/**************************************xxt.c***********************************/
PetscInt XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt handle */
                    PetscInt *local2global, /* global column mapping      */
                    PetscInt n,             /* local num rows             */
                    PetscInt m,             /* local num cols             */
                    void *matvec,           /* b_loc=A_local.x_loc        */
                    void *grid_data         /* grid data for matvec       */
                    )
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xxt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info*)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns=0;

  /* determine separators and generate firing order - NB xxt info set here */
  det_separators(xxt_handle);

  return(do_xxt_factor(xxt_handle));
}

/**************************************xxt.c***********************************/
PetscInt XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* if b is given, copy it into x and solve in place on x; */
  /* with b==NULL, x must hold the rhs on entry             */
  if (b)
    {PCTFS_rvec_copy(x,b,xxt_handle->mvi->n);}
  do_xxt_solve(xxt_handle,x);

  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_free(xxt_ADT xxt_handle)
{
  PCTFS_comm_init();
  check_handle(xxt_handle);
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle);
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}

/**************************************xxt.c***********************************/
PetscInt XXT_stats(xxt_ADT xxt_handle)
{
  PetscInt       op[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt       vals[9],  work[9];
  PetscScalar    fvals[3], fwork[3];

  PCTFS_comm_init();
  check_handle(xxt_handle);

  /* if factorization not done there are no stats */
  if (!xxt_handle->info||!xxt_handle->mvi)
    {
      if (!PCTFS_my_id) {PetscPrintf(PETSC_COMM_WORLD,"XXT_stats() :: no stats available!\n");}
      return 1;
    }

  vals[0]=vals[1]=vals[2]=xxt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xxt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xxt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]
    =xxt_handle->info->tot_solve_time/xxt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id)
    {
      PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_nnz=%D\n",PCTFS_my_id,vals[0]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_nnz=%D\n",PCTFS_my_id,vals[1]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_nnz=%D\n",PCTFS_my_id,vals[2]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(2d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.5)));
      PetscPrintf(PETSC_COMM_WORLD,"%D :: xxt   C(3d)  =%g\n",PCTFS_my_id,vals[2]/(pow(1.0*vals[5],1.6667)));
      PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_n  =%D\n",PCTFS_my_id,vals[3]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_n  =%D\n",PCTFS_my_id,vals[4]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: tot   xxt_n  =%D\n",PCTFS_my_id,vals[5]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_buf=%D\n",PCTFS_my_id,vals[6]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_buf=%D\n",PCTFS_my_id,vals[7]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: min   xxt_slv=%g\n",PCTFS_my_id,fvals[0]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: max   xxt_slv=%g\n",PCTFS_my_id,fvals[1]);
      PetscPrintf(PETSC_COMM_WORLD,"%D :: avg   xxt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);
    }

  return(0);
}

/*************************************xxt.c************************************

Description: get A_local, local portion of global coarse matrix which
is a row dist. nxm matrix w/ n<m.
   o my_ml holds address of ML struct associated w/A_local and coarse grid
   o local2global holds global number of column i (i=0,...,m-1)
   o local2global holds global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xxt.c***********************************/
static PetscInt do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}
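/* Illustrative sketch (hypothetical helper, not part of this file): a matvec
   callback conforming to the descriptor above.  Note that do_matvec() at the
   bottom of this file passes grid_data, not the mv_info*, as the first
   argument.  Here grid_data is assumed to point at a struct holding the
   local diagonal of A_local:

     typedef struct {PetscInt n; PetscScalar *diag;} my_data;

     static PetscErrorCode my_diag_matvec(void *data, PetscScalar *in, PetscScalar *out)
     {
       my_data *d = (my_data*)data;
       PetscInt i;
       for (i=0; i<d->n; i++) out[i] = d->diag[i]*in[i];
       return(0);
     }
*/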

/**************************************xxt.c***********************************/
static PetscInt xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt i,j,k,idex;
  PetscInt dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt *segs;
  PetscInt op[] = {GL_ADD,0};
  PetscInt off, len;
  PetscScalar *x_ptr;
  PetscInt *iptr, flag;
  PetscInt start=0, end, work;
  PetscInt op2[] = {GL_MIN,0};
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscInt *nsep, *lnsep, *fo;
  PetscInt a_n=xxt_handle->mvi->n;
  PetscInt a_m=xxt_handle->mvi->m;
  PetscInt *a_local2global=xxt_handle->mvi->local2global;
  PetscInt level;
  PetscInt xxt_nnz=0, xxt_max_nnz=0;
  PetscInt n, m;
  PetscInt *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt n_global;
  PetscInt xxt_zero_nnz=0;
  PetscInt xxt_zero_nnz_0=0;
  PetscBLASInt i1 = 1,dlen;
  PetscScalar dm1 = -1.0;

  n=xxt_handle->mvi->n;
  nsep=xxt_handle->info->nsep;
  lnsep=xxt_handle->info->lnsep;
  fo=xxt_handle->info->fo;
  end=lnsep[0];
  level=xxt_handle->level;
  PCTFS_gs_handle=xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++)
    {j+=nsep[i];}

  m = j-xxt_handle->ns;
  if (m!=j)
    {PetscPrintf(PETSC_COMM_WORLD,"xxt_generate() :: null space exists %D %D %D\n",m,j,xxt_handle->ns);}

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  col_sz = (PetscInt*) malloc(m*sizeof(PetscInt));
  col_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  col_vals = (PetscScalar **) malloc(m*sizeof(PetscScalar *));
  for (i=j=0; i<m; i++, j+=2)
    {
      col_indices[j]=col_indices[j+1]=col_sz[i]=-1;
      col_vals[i] = NULL;
    }
  col_indices[j]=-1;
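  /* storage layout: column i of X is kept packed - col_indices[2*i] holds its
     first nonzero row (off), col_indices[2*i+1] the length of its dense span
     (len), and the len values sit contiguously in x; the pair list is
     terminated by -1.  E.g. a column whose nonzeros occupy local rows 3..5
     stores off=3, len=3. */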

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++)
    {segs[i+1] += segs[i];}
  stages[0] = segs[0];
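  /* segs[] is now the running total of separator sizes from the bottom of
     the tree up, e.g. nsep[]={4,2,1} with level=2 yields segs[]={4,6,7}; the
     main loop below uses these as the column indices at which to step up a
     level. */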

  /* temporary vectors  */
  u  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar *) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar *) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++)
    {j+=nsep[i];}

  /* storage for sparse x values */
  n_global = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5*pow(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
  xxt_nnz = 0;
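  /* the initial allocation is a heuristic: 2.5*n_global^(5/3) models the
     fill of X for 3D problems (the same exponent XXT_stats() reports as
     C(3d)), plus j*n/2 for separator replication, split across processors;
     if the guess is exceeded the buffer is doubled on the fly below. */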

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0;i<m;i++)
    {
      /* time to move to the next level? */
      while (i==segs[dim]) {
        if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
        stages[dim++]=i;
        end+=lnsep[dim];
      }
      stages[dim]=i;

      /* which column are we firing? */
      /* i.e. set v_l */
      /* use new seps and do global min across hc to determine which one to fire */
      (start<end) ? (col=fo[start]) : (col=INT_MAX);
      PCTFS_giop_hc(&col,&work,1,op2,dim);
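      /* fo[] lists the vertices this processor must fire, in firing order;
         the GL_MIN reduction over the dim-dimensional sub-hypercube picks
         the lowest-numbered pending separator vertex as the next column. */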

      /* shouldn't need this */
      if (col==INT_MAX)
        {
          PetscInfo(0,"hey ... col==INT_MAX??\n");
          continue;
        }

      /* do I own it? I should */
      PCTFS_rvec_zero(v ,a_m);
      if (col==fo[start])
        {
          start++;
          idex=PCTFS_ivec_linear_search(col, a_local2global, a_n);
          if (idex!=-1){
            v[idex] = 1.0; j++;
          } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
        }
      else
        {
          idex=PCTFS_ivec_linear_search(col, a_local2global, a_m);
          if (idex!=-1)
            {v[idex] = 1.0;}
        }

      /* perform u = A.v_l */
      PCTFS_rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* uu =  X^T.u_l (local portion) */
      /* technically only need to zero out first i entries */
      /* later turn this into an XXT_solve call ? */
      PCTFS_rvec_zero(uu,m);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          uu[k] = BLASdot_(&dlen,u+off,&i1,x_ptr,&i1);
          x_ptr+=len;
        }

      /* uu = X^T.u_l (comm portion) */
      PCTFS_ssgl_radd  (uu, w, dim, stages);
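      /* PCTFS_ssgl_radd performs the staged, segmented global sum of uu over
         the active dim-dimensional sub-hypercube (stages[] marks the segment
         boundaries), completing uu = X^T.u_l across processors. */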

      /* z = X.uu */
      PCTFS_rvec_zero(z,n);
      x_ptr=x;
      iptr = col_indices;
      for (k=0; k<i; k++)
        {
          off = *iptr++;
          len = *iptr++;
          dlen = PetscBLASIntCast(len);
          BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1);
          x_ptr+=len;
        }

      /* compute v_l = v_l - z */
      PCTFS_rvec_zero(v+a_n,a_m-a_n);
      dlen = PetscBLASIntCast(n);
      BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1);

      /* compute u_l = A.v_l */
      if (a_n!=a_m)
        {PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);}
      PCTFS_rvec_zero(u,n);
      do_matvec(xxt_handle->mvi,v,u);

      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
      dlen = PetscBLASIntCast(n);
      alpha = BLASdot_(&dlen,u,&i1,v,&i1);
      /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
      PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

      alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);
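      /* this is classical Gram-Schmidt in the A-inner product: v started as
         a unit vector e_col, z = X.X^T.A.e_col removed the components along
         the existing columns, and the division by sqrt(v^T.A.v) below
         normalizes so that the finished factor satisfies X^T.A.X = I,
         i.e. A^{-1} = X.X^T. */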

      /* check for small alpha                             */
      /* LATER use this to detect and determine null space */
      if (fabs(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

      /* compute v_l = v_l/sqrt(alpha) */
      PCTFS_rvec_scale(v,1.0/alpha,n);

      /* add newly generated column, v_l, to X */
      flag = 1;
      off=len=0;
      for (k=0; k<n; k++)
        {
          if (v[k]!=0.0)
            {
              len=k;
              if (flag)
                {off=k; flag=0;}
            }
        }

      len -= (off-1);
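      /* after the scan, off is the first nonzero row and len the last, so
         len -= (off-1) converts len into the length of the dense span
         [off,off+len) - e.g. nonzeros in rows 3..5 give off=3, len=5-3+1=3.
         Note also that the z = X.uu loop above left x_ptr pointing one past
         the last stored column, i.e. x_ptr == x+xxt_nnz, exactly where the
         new column must be appended. */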

      if (len>0)
        {
          if ((xxt_nnz+len)>xxt_max_nnz)
            {
              PetscInfo(0,"increasing space for X by 2x!\n");
              xxt_max_nnz *= 2;
              x_ptr = (PetscScalar *) malloc(xxt_max_nnz*sizeof(PetscScalar));
              PCTFS_rvec_copy(x_ptr,x,xxt_nnz);
              free(x);
              x = x_ptr;
              x_ptr+=xxt_nnz;
            }
          xxt_nnz += len;
          PCTFS_rvec_copy(x_ptr,v+off,len);

          /* keep track of number of zeros */
          if (dim)
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz++;}
                }
            }
          else
            {
              for (k=0; k<len; k++)
                {
                  if (x_ptr[k]==0.0)
                    {xxt_zero_nnz_0++;}
                }
            }
          col_indices[2*i] = off;
          col_sz[i] = col_indices[2*i+1] = len;
          col_vals[i] = x_ptr;
        }
      else
        {
          col_indices[2*i] = 0;
          col_sz[i] = col_indices[2*i+1] = 0;
          col_vals[i] = x_ptr;
        }
    }

  /* close off stages for execution phase */
  while (dim!=level)
    {
      stages[dim++]=i;
      PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);
    }
  stages[dim]=i;

  xxt_handle->info->n=xxt_handle->mvi->n;
  xxt_handle->info->m=m;
  xxt_handle->info->nnz=xxt_nnz;
  xxt_handle->info->max_nnz=xxt_max_nnz;
  xxt_handle->info->msg_buf_sz=stages[level]-stages[0];
  xxt_handle->info->solve_uu = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->solve_w  = (PetscScalar *) malloc(m*sizeof(PetscScalar));
  xxt_handle->info->x=x;
  xxt_handle->info->col_vals=col_vals;
  xxt_handle->info->col_sz=col_sz;
  xxt_handle->info->col_indices=col_indices;
  xxt_handle->info->stages=stages;
  xxt_handle->info->nsolves=0;
  xxt_handle->info->tot_solve_time=0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt off, len, *iptr;
  PetscInt level       =xxt_handle->level;
  PetscInt n           =xxt_handle->info->n;
  PetscInt m           =xxt_handle->info->m;
  PetscInt *stages     =xxt_handle->info->stages;
  PetscInt *col_indices=xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu=xxt_handle->info->solve_uu;
  PetscScalar *solve_w =xxt_handle->info->solve_w;
  PetscScalar *x       =xxt_handle->info->x;
  PetscBLASInt i1 = 1,dlen;
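  /* the solve applies uc = X.(X^T.uc) in three phases: a local sparse
     transpose-apply into solve_uu, one staged global sum across the full
     level-dimensional hypercube, and a local sparse apply back into uc. */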

  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x  = X.X^T.b */
  /* uu = X^T.b */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      *uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,x_ptr,&i1);
    }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) {PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);}

  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=col_indices; *iptr!=-1; x_ptr+=len)
    {
      off=*iptr++; len=*iptr++;
      dlen = PetscBLASIntCast(len);
      BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1);
    }
  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  if (!xxt_handle) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL\n");

  vals[0]=vals[1]=xxt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xxt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n",vals[0],vals[1], xxt_handle->id);
  return(0);
}

/**************************************xxt.c***********************************/
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt i, ct, id;
  PetscInt mask, edge, *iptr;
  PetscInt *dir, *used;
  PetscInt sum[4], w[4];
  PetscScalar rsum[4], rw[4];
  PetscInt op[] = {GL_ADD,0};
  PetscScalar *lhs, *rhs;
  PetscInt *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle=xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt *local2global=xxt_handle->mvi->local2global;
  PetscInt  n=xxt_handle->mvi->n;
  PetscInt  m=xxt_handle->mvi->m;
  PetscInt level=xxt_handle->level;
  PetscInt shared=0;

  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir  ,level+1);
  PCTFS_ivec_zero(nsep ,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo   ,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs  = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  PCTFS_rvec_zero(rsum,2);
  for (ct=i=0;i<n;i++)
    {
      if (lhs[i]!=0.0)
        {rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i];}
    }
  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  if (fabs(rsum[0]-rsum[1])>EPS)
    {shared=1;}

  xxt_handle->info->n_global=xxt_handle->info->m_global=(PetscInt) rsum[0];
  xxt_handle->mvi->n_global =xxt_handle->mvi->m_global =(PetscInt) rsum[0];
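  /* after the gather-scatter, lhs[i] holds the multiplicity of dof i across
     processors, so sum(1/lhs[i]) counts every dof exactly once (the global
     size stored just above) while sum(lhs[i]) counts with multiplicity; the
     two differ precisely when some dof is shared.  The +0.1 protects the
     (PetscInt) truncation from floating-point round-off. */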

  /* determine separator sets top down */
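  /* In outline: each pass below splits the active sub-hypercube in half
     along dimension edge.  Each half fires a vector of ones, the
     gather-scatter propagates it, and a locally owned, still-unmarked dof
     that carries signal from across the cut lies on the interface: such
     dofs are recorded right-to-left into fo[] (the firing order) and
     stamped used[i]=edge.  In the unshared case only the half with more
     unmarked dofs records its interface dofs (ties biased LEFT); with
     shared dofs both halves record their copies.  The upper half is then
     folded onto the lower (id -= mask) and the sweep repeats down to
     level 0, where everything still unmarked is purely local. */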
  if (shared)
    {
      for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

          for (i=0;i<n;i++)
            {
              if (id< mask)
                {
                  if (lhs[i]!=0.0)
                    {lhs[i]=1.0;}
                }
              if (id>=mask)
                {
                  if (rhs[i]!=0.0)
                    {rhs[i]=1.0;}
                }
            }

          if (id< mask)
            {PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge-1);}
          else
            {PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge-1);}

          /* count number of dofs I own that have signal and not in sep set */
          PCTFS_rvec_zero(rsum,4);
          for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if (id< mask)
                    {
                      if (lhs[i]!=0.0)
                        {sum[0]++; rsum[0]+=1.0/lhs[i];}
                    }
                  /* number of dofs to be marked on rhs hc */
                  if (id>=mask)
                    {
                      if (rhs[i]!=0.0)
                        {sum[1]++; rsum[1]+=1.0/rhs[i];}
                    }
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          (id<mask) ? (rsum[2]=ct) : (rsum[3]=ct);
          PCTFS_giop_hc(sum,w,4,op,edge);
          PCTFS_grop_hc(rsum,rw,4,op,edge);
          rsum[0]+=0.1; rsum[1]+=0.1; rsum[2]+=0.1; rsum[3]+=0.1;

          if (id<mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(lhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {PCTFS_ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]=(PetscInt) rsum[0];
              dir [edge]=LEFT;
            }

          if (id>=mask)
            {
              /* mark dofs I own that have signal and not in sep set */
              for (ct=i=0;i<n;i++)
                {
                  if ((!used[i])&&(rhs[i]!=0.0))
                    {
                      ct++; nfo++;

                      if (nfo>n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nfo about to exceed n\n");

                      *--iptr = local2global[i];
                      used[i]=edge;
                    }
                }
              if (ct>1) {PCTFS_ivec_sort(iptr,ct);}

              lnsep[edge]=ct;
              nsep[edge]= (PetscInt) rsum[1];
              dir [edge]=RIGHT;
            }

          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }
  else
    {
      for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1)
        {
          /* set rsh of hc, fire, and collect lhs responses */
          (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

          /* set lsh of hc, fire, and collect rhs responses */
          (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
          PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

          /* count number of dofs I own that have signal and not in sep set */
          for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++)
            {
              if (!used[i])
                {
                  /* number of unmarked dofs on node */
                  ct++;
                  /* number of dofs to be marked on lhs hc */
                  if ((id< mask)&&(lhs[i]!=0.0)) {sum[0]++;}
                  /* number of dofs to be marked on rhs hc */
                  if ((id>=mask)&&(rhs[i]!=0.0)) {sum[1]++;}
                }
            }

          /* go for load balance - choose half with most unmarked dofs, bias LHS */
          (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
          PCTFS_giop_hc(sum,w,4,op,edge);

          /* lhs hc wins */
          if (sum[2]>=sum[3])
            {
              if (id<mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(lhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {PCTFS_ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[0];
              dir [edge]=LEFT;
            }
          /* rhs hc wins */
          else
            {
              if (id>=mask)
                {
                  /* mark dofs I own that have signal and not in sep set */
                  for (ct=i=0;i<n;i++)
                    {
                      if ((!used[i])&&(rhs[i]!=0.0))
                        {
                          ct++; nfo++;
                          *--iptr = local2global[i];
                          used[i]=edge;
                        }
                    }
                  if (ct>1) {PCTFS_ivec_sort(iptr,ct);}
                  lnsep[edge]=ct;
                }
              nsep[edge]=sum[1];
              dir [edge]=RIGHT;
            }
          /* LATER or we can recur on these to order seps at this level */
          /* do we need full set of separators for this?                */

          /* fold rhs hc into lower */
          if (id>=mask)
            {id-=mask;}
        }
    }


  /* level 0 is on processor case - so mark the remainder */
  for (ct=i=0;i<n;i++)
    {
      if (!used[i])
        {
          ct++; nfo++;
          *--iptr = local2global[i];
          used[i]=edge;
        }
    }
  if (ct>1) {PCTFS_ivec_sort(iptr,ct);}
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir  [edge]=LEFT;

  xxt_handle->info->nsep=nsep;
  xxt_handle->info->lnsep=lnsep;
  xxt_handle->info->fo=fo;
  xxt_handle->info->nfo=nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  return(0);
}

/**************************************xxt.c***********************************/
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, void *matvec, void *grid_data)
{
  mv_info *mvi;

  mvi = (mv_info*)malloc(sizeof(mv_info));
  mvi->n=n;
  mvi->m=m;
  mvi->n_global=-1;
  mvi->m_global=-1;
  mvi->local2global=(PetscInt*)malloc((m+1)*sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
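  /* the INT_MAX entry just written is presumably a sentinel so that scans
     of the (m+1)-entry map can terminate without running off the end */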
  mvi->matvec=(PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec;
  mvi->grid_data=grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}

/**************************************xxt.c***********************************/
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
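  /* note: grid_data, not the mv_info*, is what actually reaches the user
     callback as its first argument (cf. the mylocmatvec descriptor above);
     the cast below only satisfies the stored function-pointer signature */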
  A->matvec((mv_info*)A->grid_data,v,u);
  return(0);
}