/*************************************xyt.c************************************
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
**************************************xyt.c***********************************/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT  1
#define BOTH   0
typedef struct xyt_solver_info {
  PetscInt    n, m, n_global, m_global;
  PetscInt    nnz, max_nnz, msg_buf_sz;
  PetscInt    *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt    *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt    *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt    nsolves;
  PetscScalar tot_solve_time;
} xyt_info;
typedef struct matvec_info {
  PetscInt       n, m, n_global, m_global;
  PetscInt       *local2global;
  PCTFS_gs_ADT   PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info*,PetscScalar*,PetscScalar*);
  void           *grid_data;
} mv_info;
struct xyt_CDT {
  PetscInt id;
  PetscInt ns;
  PetscInt level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt        =0;
static PetscInt n_xyt_handles=0;
/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscInt xyt_generate(xyt_ADT xyt_handle);
static PetscInt do_xyt_factor(xyt_ADT xyt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data);
/**************************************xyt.c***********************************/
xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... potential problem here */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return(xyt_handle);
}
/**************************************xyt.c***********************************/
PetscInt XYT_factor(xyt_ADT xyt_handle,     /* prev. allocated xyt handle */
                    PetscInt *local2global, /* global column mapping      */
                    PetscInt n,             /* local num rows             */
                    PetscInt m,             /* local num cols             */
                    PetscErrorCode (*matvec)(void*,PetscScalar*,PetscScalar*), /* b_loc=A_local.x_loc */
                    void *grid_data)        /* grid data for matvec       */
{
  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* only 2^k for now and all nodes participating */
  if ((1<<(xyt_handle->level=PCTFS_i_log2_num_nodes))!=PCTFS_num_nodes) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"only 2^k for now and MPI_COMM_WORLD!!! %D != %D\n",1<<PCTFS_i_log2_num_nodes,PCTFS_num_nodes);

  /* space for X info */
  xyt_handle->info = (xyt_info*)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info*,PetscScalar*,PetscScalar*))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank deficiency */
  xyt_handle->ns=0;

  /* determine separators and generate firing order - NB xyt info set here */
  det_separators(xyt_handle);

  return(do_xyt_factor(xyt_handle));
}
/**************************************xyt.c***********************************/
PetscInt XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x,b,xyt_handle->mvi->n);
  do_xyt_solve(xyt_handle,x);

  return(0);
}
/**************************************xyt.c***********************************/
PetscInt XYT_free(xyt_ADT xyt_handle)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return(0);
}
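
/* A minimal usage sketch of the public XYT interface (illustration only; in
   PETSc this handle is normally driven by the PCTFS preconditioner).
   my_matvec and grid_data are hypothetical user-supplied names:

     xyt_ADT handle = XYT_new();
     XYT_factor(handle, local2global, n, m, my_matvec, grid_data);
     XYT_solve(handle, x, b);   (computes x = X.(Y^T.b); pass b=NULL if the
                                 right-hand side is already in x)
     XYT_stats(handle);
     XYT_free(handle);
*/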
/**************************************xyt.c***********************************/
PetscInt XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM,GL_MIN,GL_MAX,GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* if factorization not done there are no stats */
  if (!xyt_handle->info||!xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscPrintf(PETSC_COMM_WORLD,"XYT_stats() :: no stats available!\n");
    return 1;
  }

  vals[0]=vals[1]=vals[2]=xyt_handle->info->nnz;
  vals[3]=vals[4]=vals[5]=xyt_handle->mvi->n;
  vals[6]=vals[7]=vals[8]=xyt_handle->info->msg_buf_sz;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);

  fvals[0]=fvals[1]=fvals[2]=xyt_handle->info->tot_solve_time/xyt_handle->info->nsolves++;
  PCTFS_grop(fvals,fwork,sizeof(fop)/sizeof(fop[0])-1,fop);

  if (!PCTFS_my_id) {
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_nnz=%D\n",PCTFS_my_id,vals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_nnz=%D\n",PCTFS_my_id,vals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_nnz=%g\n",PCTFS_my_id,1.0*vals[2]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xyt_nnz=%D\n",PCTFS_my_id,vals[2]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt C(2d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.5)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: xyt C(3d)  =%g\n",PCTFS_my_id,vals[2]/(PetscPowReal(1.0*vals[5],1.6667)));
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_n  =%D\n",PCTFS_my_id,vals[3]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_n  =%D\n",PCTFS_my_id,vals[4]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_n  =%g\n",PCTFS_my_id,1.0*vals[5]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: tot xyt_n  =%D\n",PCTFS_my_id,vals[5]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_buf=%D\n",PCTFS_my_id,vals[6]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_buf=%D\n",PCTFS_my_id,vals[7]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_buf=%g\n",PCTFS_my_id,1.0*vals[8]/PCTFS_num_nodes);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: min xyt_slv=%g\n",PCTFS_my_id,fvals[0]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: max xyt_slv=%g\n",PCTFS_my_id,fvals[1]);
    PetscPrintf(PETSC_COMM_WORLD,"%D :: avg xyt_slv=%g\n",PCTFS_my_id,fvals[2]/PCTFS_num_nodes);
  }

  return(0);
}
/*************************************xyt.c************************************

Description: get A_local, the local portion of the global coarse matrix,
which is a row-distributed nxm matrix with n<m.
o my_ml holds the address of the ML struct associated with A_local and the coarse grid
o local2global holds the global number of column i (i=0,...,m-1)
  and the global number of row i (i=0,...,n-1)
o mylocmatvec performs A_local . vec_local (note that gs is performed using
  PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
**************************************xyt.c***********************************/
static PetscInt do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}
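
/* Sketch of a conforming matvec callback for XYT_factor (a hypothetical
   example, not part of this module; any routine with this signature that
   computes b_loc = A_local.x_loc will do):

     static PetscErrorCode my_matvec(void *grid_data, PetscScalar *in, PetscScalar *out)
     {
       (apply the locally owned rows: out[0..n-1] = A_local * in[0..m-1])
       return 0;
     }
*/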
/**************************************xyt.c***********************************/
static PetscInt xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt     i,j,k,idx;
  PetscInt     dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt     op[] = {GL_ADD,0};
  PetscInt     off, len;
  PetscScalar  *x_ptr, *y_ptr;
  PetscInt     *iptr, flag;
  PetscInt     start=0, end, work;
  PetscInt     op2[] = {GL_MIN,0};
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt     a_n=xyt_handle->mvi->n;
  PetscInt     a_m=xyt_handle->mvi->m;
  PetscInt     *a_local2global=xyt_handle->mvi->local2global;
  PetscInt     level;
  PetscInt     n, m;
  PetscInt     *xcol_sz, *xcol_indices, *stages;
  PetscScalar  **xcol_vals, *x;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar  **ycol_vals, *y;
  PetscInt     n_global;
  PetscInt     xt_nnz=0, xt_max_nnz=0;
  PetscInt     yt_nnz=0, yt_max_nnz=0;
  PetscInt     xt_zero_nnz=0;
  PetscInt     xt_zero_nnz_0=0;
  PetscInt     yt_zero_nnz=0;
  PetscInt     yt_zero_nnz_0=0;
  PetscBLASInt i1=1,dlen;
  PetscScalar  dm1=-1.0;

  n              =xyt_handle->mvi->n;
  nsep           =xyt_handle->info->nsep;
  lnsep          =xyt_handle->info->lnsep;
  fo             =xyt_handle->info->fo;
  end            =lnsep[0];
  level          =xyt_handle->level;
  PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i=0, j=0; i<=level; i++) j+=nsep[i];

  m = j-xyt_handle->ns;
  if (m!=j) {
    PetscPrintf(PETSC_COMM_WORLD,"xyt_generate() :: null space exists %D %D %D\n",m,j,xyt_handle->ns);
  }

  PetscInfo2(0,"xyt_generate() :: X(%D,%D)\n",n,m);
  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  xcol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  xcol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    xcol_indices[j]=xcol_indices[j+1]=xcol_sz[i]=-1;
    xcol_vals[i]   = NULL;
  }
  xcol_indices[j]=-1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt*) malloc(m*sizeof(PetscInt));
  ycol_indices = (PetscInt*) malloc((2*m+1)*sizeof(PetscInt));
  ycol_vals    = (PetscScalar**) malloc(m*sizeof(PetscScalar*));
  for (i=j=0; i<m; i++, j+=2) {
    ycol_indices[j]=ycol_indices[j+1]=ycol_sz[i]=-1;
    ycol_vals[i]   = NULL;
  }
  ycol_indices[j]=-1;
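
  /* layout note (comment added for clarity): xcol_indices/ycol_indices hold
     one (offset,length) pair per column, flattened, with a trailing -1
     sentinel:
       column i -> offset = xcol_indices[2*i], length = xcol_indices[2*i+1]
     and xcol_vals[i] points at that column's packed nonzero run inside x */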
  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  segs   = (PetscInt*) malloc((level+1)*sizeof(PetscInt));
  PCTFS_ivec_zero(stages,level+1);
  PCTFS_ivec_copy(segs,nsep,level+1);
  for (i=0; i<level; i++) segs[i+1] += segs[i];
  stages[0] = segs[0];
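
  /* worked example (illustrative values, not taken from the code): with
     level=2 and nsep={3,2,1}, the prefix sum gives segs={3,5,6}, so fired
     columns 0..2 belong to the level-0 separator set, 3..4 to level 1,
     and 5 to level 2 */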
  /* temporary vectors */
  u  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  z  = (PetscScalar*) malloc(n*sizeof(PetscScalar));
  v  = (PetscScalar*) malloc(a_m*sizeof(PetscScalar));
  uu = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  w  = (PetscScalar*) malloc(m*sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i=1, j=0; i<=level; i++) j+=nsep[i];

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5*PetscPowReal(1.0*n_global,1.6667) + j*n/2)/PCTFS_num_nodes;
  x          = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
  y          = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));
  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim=i=j=0; i<m; i++) {
    /* time to move to the next level? */
    while (i==segs[dim]) {
      if (dim==level) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"dim about to exceed level\n");
      stages[dim++]=i;
      end         +=lnsep[dim];
    }
    stages[dim]=i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start<end) ? (col=fo[start]) : (col=INT_MAX);
    PCTFS_giop_hc(&col,&work,1,op2,dim);

    /* shouldn't need this */
    if (col==INT_MAX) {
      PetscInfo(0,"hey ... col==INT_MAX??\n");
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v,a_m);
    if (col==fo[start]) {
      start++;
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idx!=-1) {
        v[idx] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"NOT FOUND!\n");
    } else {
      idx=PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx!=-1) v[idx] = 1.0;
    }
    /* perform u = A.v_l */
    PCTFS_rvec_zero(u,n);
    do_matvec(xyt_handle->mvi,v,u);

    /* uu = Y^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PCTFS_rvec_zero(uu,m);
    y_ptr=y;
    iptr = ycol_indices;
    for (k=0; k<i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscBLASIntCast(len,&dlen);
      PetscStackCallBLAS("BLASdot",uu[k] = BLASdot_(&dlen,u+off,&i1,y_ptr,&i1));
      y_ptr+=len;
    }

    /* uu = Y^T.u_l (comm portion) */
    PCTFS_ssgl_radd(uu, w, dim, stages);

    /* z = X.uu */
    PCTFS_rvec_zero(z,n);
    x_ptr=x;
    iptr = xcol_indices;
    for (k=0; k<i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscBLASIntCast(len,&dlen);
      PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&uu[k],x_ptr,&i1,z+off,&i1));
      x_ptr+=len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v+a_n,a_m-a_n);
    PetscBLASIntCast(n,&dlen);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,&dm1,z,&i1,v,&i1));
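
    /* net effect of the uu/z steps above (comment added for clarity):
       v_l <- v_l - X.(Y^T.(A.v_l)), i.e. the new column is made A-conjugate
       to the previously accepted columns (y_k^T.A.v_l = 0 for k<i), in
       Gram-Schmidt fashion */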
    /* compute u_l = A.v_l */
    if (a_n!=a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle,v,"+\0",dim);
    PCTFS_rvec_zero(u,n);
    do_matvec(xyt_handle->mvi,v,u);

    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
    PetscBLASIntCast(n,&dlen);
    PetscStackCallBLAS("BLASdot",alpha = BLASdot_(&dlen,u,&i1,u,&i1));
    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar) PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    if (PetscAbsScalar(alpha)<1.0e-14) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"bad alpha! %g\n",alpha);

    /* compute v_l = v_l/sqrt(alpha) */
    PCTFS_rvec_scale(v,1.0/alpha,n);
    PCTFS_rvec_scale(u,1.0/alpha,n);
    /* add newly generated column, v_l, to X */
    flag = 1;
    off  = len = 0;
    for (k=0; k<n; k++) {
      if (v[k]!=0.0) {
        len=k;
        if (flag) {off=k; flag=0;}
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((xt_nnz+len)>xt_max_nnz) {
        PetscInfo(0,"increasing space for X by 2x!\n");
        xt_max_nnz *= 2;
        x_ptr       = (PetscScalar*) malloc(xt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr,x,xt_nnz);
        free(x);
        x     = x_ptr;
        x_ptr+=xt_nnz;
      }
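      /* note (comment added for clarity): when no reallocation occurs,
         x_ptr still points at x+xt_nnz, left there by the "z = X.uu" loop
         above, so the copy below always appends at the end of the packed
         storage */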
      xt_nnz += len;
      PCTFS_rvec_copy(x_ptr,v+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (x_ptr[k]==0.0) xt_zero_nnz_0++;
        }
      }
      xcol_indices[2*i] = off;
      xcol_sz[i]        = xcol_indices[2*i+1] = len;
      xcol_vals[i]      = x_ptr;
    } else {
      xcol_indices[2*i] = 0;
      xcol_sz[i]        = xcol_indices[2*i+1] = 0;
      xcol_vals[i]      = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off  = len = 0;
    for (k=0; k<n; k++) {
      if (u[k]!=0.0) {
        len=k;
        if (flag) { off=k; flag=0; }
      }
    }

    len -= (off-1);

    if (len>0) {
      if ((yt_nnz+len)>yt_max_nnz) {
        PetscInfo(0,"increasing space for Y by 2x!\n");
        yt_max_nnz *= 2;
        y_ptr       = (PetscScalar*) malloc(yt_max_nnz*sizeof(PetscScalar));
        PCTFS_rvec_copy(y_ptr,y,yt_nnz);
        free(y);
        y     = y_ptr;
        y_ptr+=yt_nnz;
      }
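      /* note (comment added for clarity): as with x_ptr above, y_ptr
         already points at y+yt_nnz courtesy of the "uu = Y^T.u_l" loop,
         so the copy below appends */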
      yt_nnz += len;
      PCTFS_rvec_copy(y_ptr,u+off,len);

      /* keep track of number of zeros */
      if (dim) {
        for (k=0; k<len; k++) {
          if (y_ptr[k]==0.0) yt_zero_nnz++;
        }
      } else {
        for (k=0; k<len; k++) {
          if (y_ptr[k]==0.0) yt_zero_nnz_0++;
        }
      }
      ycol_indices[2*i] = off;
      ycol_sz[i]        = ycol_indices[2*i+1] = len;
      ycol_vals[i]      = y_ptr;
    } else {
      ycol_indices[2*i] = 0;
      ycol_sz[i]        = ycol_indices[2*i+1] = 0;
      ycol_vals[i]      = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim!=level) {
    stages[dim++]=i;
    PetscInfo2(0,"disconnected!!! dim(%D)!=level(%D)\n",dim,level);
  }
  stages[dim]=i;

  xyt_handle->info->n           =xyt_handle->mvi->n;
  xyt_handle->info->m           =m;
  xyt_handle->info->nnz         =xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz     =xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz  =stages[level]-stages[0];
  xyt_handle->info->solve_uu    = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->solve_w     = (PetscScalar*) malloc(m*sizeof(PetscScalar));
  xyt_handle->info->x           =x;
  xyt_handle->info->xcol_vals   =xcol_vals;
  xyt_handle->info->xcol_sz     =xcol_sz;
  xyt_handle->info->xcol_indices=xcol_indices;
  xyt_handle->info->stages      =stages;
  xyt_handle->info->y           =y;
  xyt_handle->info->ycol_vals   =ycol_vals;
  xyt_handle->info->ycol_sz     =ycol_sz;
  xyt_handle->info->ycol_indices=ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return(0);
}
/**************************************xyt.c***********************************/
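/* do_xyt_solve applies the factorization in place: on entry uc holds the
   right-hand side b, on exit the solution X.(Y^T.b).  (Comment added for
   clarity; the two loops below are the Y^T pass and the X pass.) */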
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level        =xyt_handle->level;
  PetscInt     n            =xyt_handle->info->n;
  PetscInt     m            =xyt_handle->info->m;
  PetscInt     *stages      =xyt_handle->info->stages;
  PetscInt     *xcol_indices=xyt_handle->info->xcol_indices;
  PetscInt     *ycol_indices=xyt_handle->info->ycol_indices;
  PetscScalar  *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar  *solve_uu=xyt_handle->info->solve_uu;
  PetscScalar  *solve_w =xyt_handle->info->solve_w;
  PetscScalar  *x       =xyt_handle->info->x;
  PetscScalar  *y       =xyt_handle->info->y;
  PetscBLASInt i1       = 1,dlen;

  uu_ptr=solve_uu;
  PCTFS_rvec_zero(uu_ptr,m);

  /* x = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr=y,iptr=ycol_indices; *iptr!=-1; y_ptr+=len) {
    off =*iptr++;
    len =*iptr++;
    PetscBLASIntCast(len,&dlen);
    PetscStackCallBLAS("BLASdot",*uu_ptr++ = BLASdot_(&dlen,uc+off,&i1,y_ptr,&i1));
  }

  /* communication of beta */
  uu_ptr=solve_uu;
  if (level) PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);
  PCTFS_rvec_zero(uc,n);

  /* x = X.uu */
  for (x_ptr=x,iptr=xcol_indices; *iptr!=-1; x_ptr+=len) {
    off =*iptr++;
    len =*iptr++;
    PetscBLASIntCast(len,&dlen);
    PetscStackCallBLAS("BLASaxpy",BLASaxpy_(&dlen,uu_ptr++,x_ptr,&i1,uc+off,&i1));
  }
  return(0);
}
/**************************************xyt.c***********************************/
static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM,GL_MIN,GL_MAX};

  if (!xyt_handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: NULL %D\n",xyt_handle);

  vals[0]=vals[1]=xyt_handle->id;
  PCTFS_giop(vals,work,sizeof(op)/sizeof(op[0])-1,op);
  if ((vals[0]!=vals[1])||(xyt_handle->id<=0)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"check_handle() :: bad handle :: id mismatch min/max %D/%D %D\n", vals[0],vals[1], xyt_handle->id);
  return(0);
}
/**************************************xyt.c***********************************/
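/* det_separators walks the processor hypercube from the top level down,
   halving it at each step; dofs that carry signal across a split form that
   level's separator set.  fo is filled from the back, so the final firing
   order read by xyt_generate runs from the on-processor (level-0) dofs up
   to the top-level separators.  (Comment added for clarity.) */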
static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt     *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD,0};
  PetscScalar  *lhs, *rhs;
  PetscInt     *nsep, *lnsep, *fo, nfo=0;
  PCTFS_gs_ADT PCTFS_gs_handle=xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt     *local2global  =xyt_handle->mvi->local2global;
  PetscInt     n              =xyt_handle->mvi->n;
  PetscInt     m              =xyt_handle->mvi->m;
  PetscInt     level          =xyt_handle->level;
  PetscInt     shared         =0;

  dir  = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  nsep = (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  lnsep= (PetscInt*)malloc(sizeof(PetscInt)*(level+1));
  fo   = (PetscInt*)malloc(sizeof(PetscInt)*(n+1));
  used = (PetscInt*)malloc(sizeof(PetscInt)*n);

  PCTFS_ivec_zero(dir,level+1);
  PCTFS_ivec_zero(nsep,level+1);
  PCTFS_ivec_zero(lnsep,level+1);
  PCTFS_ivec_set (fo,-1,n+1);
  PCTFS_ivec_zero(used,n);

  lhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);
  rhs = (PetscScalar*)malloc(sizeof(PetscScalar)*m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs,m);
  PCTFS_rvec_set(lhs,1.0,n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",level);
  PetscInfo(0,"done first PCTFS_gs_gop_hc\n");
  PCTFS_rvec_zero(rsum,2);
  for (i=0; i<n; i++) {
    if (lhs[i]!=0.0) { rsum[0]+=1.0/lhs[i]; rsum[1]+=lhs[i]; }
    if (lhs[i]!=1.0) shared=1;
  }

  PCTFS_grop_hc(rsum,rw,2,op,level);
  rsum[0]+=0.1;
  rsum[1]+=0.1;

  xyt_handle->info->n_global=xyt_handle->info->m_global=(PetscInt) rsum[0];
  xyt_handle->mvi->n_global =xyt_handle->mvi->m_global =(PetscInt) rsum[0];

  /* determine separator sets top down */
  if (shared) {
    /* solution is to do as in the symmetric shared case but then */
    /* pick the sub-hc with the most free dofs and do a mat-vec   */
    /* and pick up the responses on the other sub-hc from the     */
    /* initial separator set obtained from the symm. shared case  */
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"shared dof separator determination not ready ... see hmt!!!\n");
    /* [dead code deleted since it is unlikely to be completed] */
  } else {
    for (iptr=fo+n,id=PCTFS_my_id,mask=PCTFS_num_nodes>>1,edge=level;edge>0;edge--,mask>>=1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id<mask) ? PCTFS_rvec_zero(lhs,m) : PCTFS_rvec_set(lhs,1.0,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,lhs,"+\0",edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id<mask) ? PCTFS_rvec_set(rhs,1.0,m) : PCTFS_rvec_zero(rhs,m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle,rhs,"+\0",edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum,4),ct=i=0;i<n;i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id< mask)&&(lhs[i]!=0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id>=mask)&&(rhs[i]!=0.0)) sum[1]++;
        }
      }

      /* for the non-symmetric case we need separators of width 2 */
      /* so take both sides */
      (id<mask) ? (sum[2]=ct) : (sum[3]=ct);
      PCTFS_giop_hc(sum,w,4,op,edge);

      ct=0;
      if (id<mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0;i<n;i++) {
          if ((!used[i])&&(lhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* LSH hc summation of ct should be sum[0] */
      } else {
        /* mark dofs I own that have signal and not in sep set */
        for (i=0; i<n; i++) {
          if ((!used[i])&&(rhs[i]!=0.0)) {
            ct++; nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* RSH hc summation of ct should be sum[1] */
      }

      if (ct>1) PCTFS_ivec_sort(iptr,ct);
      lnsep[edge]=ct;
      nsep[edge] =sum[0]+sum[1];
      dir [edge] =BOTH;

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id>=mask) id-=mask;
    }
  }

  /* level 0 is the on-processor case - so mark the remainder */
  for (ct=i=0;i<n;i++) {
    if (!used[i]) {
      ct++; nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct>1) PCTFS_ivec_sort(iptr,ct);
  lnsep[edge]=ct;
  nsep [edge]=ct;
  dir [edge]=BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  return(0);
}
/**************************************xyt.c***********************************/
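/* set_mvi packages the user matvec, its grid data, and the local-to-global
   map into an mv_info; the map gets a private copy with an INT_MAX sentinel
   in slot m, and a gs handle is built to perform the restricted matvec.
   (Comment added for clarity.) */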
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info*,PetscScalar*,PetscScalar*), void *grid_data)
{
  mv_info *mvi;

  mvi              = (mv_info*)malloc(sizeof(mv_info));
  mvi->n           = n;
  mvi->m           = m;
  mvi->n_global    = -1;
  mvi->m_global    = -1;
  mvi->local2global= (PetscInt*)malloc((m+1)*sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global,local2global,m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return(mvi);
}
/**************************************xyt.c***********************************/
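/* do_matvec invokes the stored callback.  Note the quirk (comment added for
   clarity): the callback was cast in XYT_factor to take an mv_info* first
   argument, but what is actually passed here is A->grid_data, matching the
   original void* signature supplied by the caller. */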
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  A->matvec((mv_info*)A->grid_data,v,u);
  return(0);
}