Actual source code: ex6.c
petsc-3.3-p7 2013-05-11
static char help[] = "Demonstrates using three DMDAs to manage a slightly non-trivial grid";

#include <petscdmda.h>

struct _p_FA {
  MPI_Comm   comm[3];
  PetscInt   xl[3],yl[3],ml[3],nl[3];   /* corners and sizes of local vector in DMDA */
  PetscInt   xg[3],yg[3],mg[3],ng[3];   /* corners and sizes of global vector in DMDA */
  PetscInt   offl[3],offg[3];           /* offset in local and global vector of region 1, 2 and 3 portions */
  Vec        g,l;
  VecScatter vscat;
  PetscInt   p1,p2,r1,r2,r1g,r2g,sw;
};
typedef struct _p_FA *FA;

typedef struct {
  PetscScalar X;
  PetscScalar Y;
} Field;
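
/* Added illustration (not part of the original example): the accessor routines
   below rely on each node's two scalars being interleaved, so a Vec holding
   len nodes has length 2*len and its raw array can be viewed as an array of
   Field. A minimal sketch of that aliasing, assuming a sequential Vec of
   length 2*len: */
static PetscErrorCode FieldAliasSketch(Vec v,PetscInt len)
{
  PetscScalar *va;
  Field       *nodes;
  PetscInt    k;

  VecGetArray(v,&va);
  nodes = (Field*)va; /* nodes[k].X aliases va[2*k], nodes[k].Y aliases va[2*k+1] */
  for (k=0; k<len; k++) {nodes[k].X = 0.0; nodes[k].Y = 0.0;}
  VecRestoreArray(v,&va);
  return(0);
}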
PetscErrorCode FAGetLocalCorners(FA fa,PetscInt j,PetscInt *x,PetscInt *y,PetscInt *m,PetscInt *n)
{
  if (fa->comm[j]) {
    *x = fa->xl[j];
    *y = fa->yl[j];
    *m = fa->ml[j];
    *n = fa->nl[j];
  } else {
    *x = *y = *m = *n = 0;
  }
  return(0);
}

PetscErrorCode FAGetGlobalCorners(FA fa,PetscInt j,PetscInt *x,PetscInt *y,PetscInt *m,PetscInt *n)
{
  if (fa->comm[j]) {
    *x = fa->xg[j];
    *y = fa->yg[j];
    *m = fa->mg[j];
    *n = fa->ng[j];
  } else {
    *x = *y = *m = *n = 0;
  }
  return(0);
}
PetscErrorCode FAGetLocalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  PetscScalar *va;
  PetscInt    i;
  Field       **a;

  if (fa->comm[j]) {
    VecGetArray(v,&va);
    PetscMalloc(fa->nl[j]*sizeof(Field*),&a);
    for (i=0; i<fa->nl[j]; i++) a[i] = (Field*) (va + 2*fa->offl[j] + i*2*fa->ml[j] - 2*fa->xl[j]);
    *f = a - fa->yl[j];
    VecRestoreArray(v,&va);
  } else {
    *f = 0;
  }
  return(0);
}

PetscErrorCode FARestoreLocalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  void *dummy;

  if (fa->comm[j]) {
    dummy = *f + fa->yl[j];
    PetscFree(dummy);
  }
  return(0);
}
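
/* Added usage sketch (hypothetical helper, not in the original example): the
   row pointers built in FAGetLocalArray() are offset so that f[j][i] is
   addressed with the DMDA's own local coordinates, j in [yl,yl+nl) and
   i in [xl,xl+ml). For instance, zeroing one region of a local vector: */
static PetscErrorCode FAZeroLocalRegion(FA fa,Vec l,PetscInt j)
{
  Field    **f;
  PetscInt x,y,m,n,r,c;

  FAGetLocalCorners(fa,j,&x,&y,&m,&n);
  FAGetLocalArray(fa,l,j,&f);
  if (f) {
    for (r=y; r<y+n; r++) {
      for (c=x; c<x+m; c++) {f[r][c].X = 0.0; f[r][c].Y = 0.0;}
    }
  }
  FARestoreLocalArray(fa,l,j,&f);
  return(0);
}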
PetscErrorCode FAGetGlobalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  PetscScalar *va;
  PetscInt    i;
  Field       **a;

  if (fa->comm[j]) {
    VecGetArray(v,&va);
    PetscMalloc(fa->ng[j]*sizeof(Field*),&a);
    for (i=0; i<fa->ng[j]; i++) a[i] = (Field*) (va + 2*fa->offg[j] + i*2*fa->mg[j] - 2*fa->xg[j]);
    *f = a - fa->yg[j];
    VecRestoreArray(v,&va);
  } else {
    *f = 0;
  }
  return(0);
}

PetscErrorCode FARestoreGlobalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  void *dummy;

  if (fa->comm[j]) {
    dummy = *f + fa->yg[j];
    PetscFree(dummy);
  }
  return(0);
}

PetscErrorCode FAGetGlobalVector(FA fa,Vec *v)
{
  VecDuplicate(fa->g,v);
  return(0);
}

PetscErrorCode FAGetLocalVector(FA fa,Vec *v)
{
  VecDuplicate(fa->l,v);
  return(0);
}

PetscErrorCode FAGlobalToLocal(FA fa,Vec g,Vec l)
{
  VecScatterBegin(fa->vscat,g,l,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(fa->vscat,g,l,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}
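
/* Added usage sketch (not in the original example): the typical update cycle
   with an FA is to assemble into the global vector and then refresh the
   ghosted local vector before any stencil-style access: */
static PetscErrorCode FARefreshSketch(FA fa)
{
  Vec g,l;

  FAGetGlobalVector(fa,&g);
  FAGetLocalVector(fa,&l);
  VecSet(g,1.0);           /* stand-in for real assembly into the global vector */
  FAGlobalToLocal(fa,g,l); /* the ghost slots of l now hold the neighboring regions' data */
  VecDestroy(&g);
  VecDestroy(&l);
  return(0);
}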
PetscErrorCode FADestroy(FA *fa)
{
  VecDestroy(&(*fa)->g);
  VecDestroy(&(*fa)->l);
  VecScatterDestroy(&(*fa)->vscat);
  PetscFree(*fa);
  return(0);
}
PetscErrorCode FACreate(FA *infa)
{
  FA          fa;
  PetscMPIInt rank;
  PetscInt    tonglobal,globalrstart,x,nx,y,ny,*tonatural,i,j,*to,*from,offt[3];
  PetscInt    *fromnatural,fromnglobal,nscat,nlocal,cntl1,cntl2,cntl3,*indices;

  /* Each DMDA manages the local vector for the portion of regions 1, 2, and 3 on that processor.
     Each DMDA can live on any subset of processors (overlapping between DMDAs or not).
     On processes where a particular DMDA does not exist, the corresponding comm is set to zero.
  */
  DM da1 = 0,da2 = 0,da3 = 0;
  /*
     v1, v2, v3 represent the local vector for a single DMDA
  */
  Vec vl1 = 0,vl2 = 0,vl3 = 0, vg1 = 0, vg2 = 0,vg3 = 0;

  /*
     globalvec and friends represent the global vectors that are used for the PETSc solvers.
     localvec represents the concatenation of the (up to) 3 local vectors: vl1, vl2, vl3.

     tovec and friends represent intermediate vectors that are ONLY used for setting up the
     final communication patterns. Once this setup routine is complete they are destroyed.
     The tovec is like the globalvec EXCEPT it has redundant locations for the ghost points
     between regions 2+3 and 1.
  */
  AO          toao,globalao;
  IS          tois,globalis,is;
  Vec         tovec,globalvec,localvec;
  VecScatter  vscat;
  PetscScalar *globalarray,*localarray,*toarray;

  PetscNew(struct _p_FA,&fa);
  /*
     fa->sw is the stencil width

     fa->p1 is the width of region 1, fa->p2 the width of region 2 (must be the same)
     fa->r1 is the height of region 1
     fa->r2 is the height of region 2

     fa->r2 is also the height of regions 3 and 4
     (fa->p1 - fa->p2)/2 is the width of both region 3 and region 4
  */
  fa->p1  = 24;
  fa->p2  = 15;
  fa->r1  = 6;
  fa->r2  = 6;
  fa->sw  = 1;
  fa->r1g = fa->r1 + fa->sw;
  fa->r2g = fa->r2 + fa->sw;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  fa->comm[0] = PETSC_COMM_WORLD;
  fa->comm[1] = PETSC_COMM_WORLD;
  fa->comm[2] = PETSC_COMM_WORLD;
  /* Test case with different communicators */
  /* Normally one would use MPI_Comm routines to build the MPI communicators on which to partition the DMDAs */
  /*
  if (rank == 0) {
    fa->comm[0] = PETSC_COMM_SELF;
    fa->comm[1] = 0;
    fa->comm[2] = 0;
  } else if (rank == 1) {
    fa->comm[0] = 0;
    fa->comm[1] = PETSC_COMM_SELF;
    fa->comm[2] = 0;
  } else {
    fa->comm[0] = 0;
    fa->comm[1] = 0;
    fa->comm[2] = PETSC_COMM_SELF;
  } */

  if (fa->p2 > fa->p1 - 3) SETERRQ(PETSC_COMM_SELF,1,"Width of region fa->p2 must be at least 3 less than the width of region 1");
  if (!((fa->p2 - fa->p1) % 2)) SETERRQ(PETSC_COMM_SELF,1,"Width of region 3 must NOT be divisible by 2");
  if (fa->comm[1]) {
    DMDACreate2d(fa->comm[1],DMDA_BOUNDARY_PERIODIC,DMDA_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p2,fa->r2g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,PETSC_NULL,PETSC_NULL,&da2);
    DMGetLocalVector(da2,&vl2);
    DMGetGlobalVector(da2,&vg2);
  }
  if (fa->comm[2]) {
    DMDACreate2d(fa->comm[2],DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p1-fa->p2,fa->r2g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,PETSC_NULL,PETSC_NULL,&da3);
    DMGetLocalVector(da3,&vl3);
    DMGetGlobalVector(da3,&vg3);
  }
  if (fa->comm[0]) {
    DMDACreate2d(fa->comm[0],DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p1,fa->r1g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,PETSC_NULL,PETSC_NULL,&da1);
    DMGetLocalVector(da1,&vl1);
    DMGetGlobalVector(da1,&vg1);
  }
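
  /* Added note: each DMDA is created sw rows taller than its region
     (r1g = r1+sw, r2g = r2+sw) so the seam between region 1 and regions 2/3
     is stored redundantly on both sides; da2 is periodic in x since region 2
     closes on itself. */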
  /* count the number of unknowns owned on each processor and determine the starting point of each processor's ownership
     for the global vector with redundancy */
  tonglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] Number of unknowns owned %d\n",rank,tonglobal);
  PetscSynchronizedFlush(PETSC_COMM_WORLD);

  /* Get tonatural number for each node */
  PetscMalloc((tonglobal+1)*sizeof(PetscInt),&tonatural);
  tonglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        tonatural[tonglobal++] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) tonatural[tonglobal++] = x + i + fa->p1*(y + j);
        else tonatural[tonglobal++] = fa->p2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        tonatural[tonglobal++] = fa->p1*fa->r2g + x + i + fa->p1*(y + j);
      }
    }
  }
  /* PetscIntView(tonglobal,tonatural,PETSC_VIEWER_STDOUT_WORLD); */
  AOCreateBasic(PETSC_COMM_WORLD,tonglobal,tonatural,0,&toao);
  PetscFree(tonatural);
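
  /* Added worked example of the natural numbering above, with the default
     sizes p1 = 24, p2 = 15: the bottom band (regions 3, 2, 4) is numbered in
     rows of width p1 with region 2 shifted right by (p1-p2)/2 = 4, so the
     region-2 node (i,j) = (0,0) gets natural index 4; region 1 sits above the
     band, starting at p1*r2g = 24*7 = 168. */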
  /* count the number of unknowns owned on each processor and determine the starting point of each processor's ownership
     for the global vector without redundancy */
  fromnglobal = 0;
  fa->offg[1] = 0;
  offt[1]     = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    offt[2] = nx*ny;
    if (y+ny == fa->r2g) {ny--;}  /* includes the ghost points on the upper side */
    fromnglobal += nx*ny;
    fa->offg[2]  = fromnglobal;
  } else {
    offt[2]     = 0;
    fa->offg[2] = 0;
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    offt[0] = offt[2] + nx*ny;
    if (y+ny == fa->r2g) {ny--;}  /* includes the ghost points on the upper side */
    fromnglobal += nx*ny;
    fa->offg[0]  = fromnglobal;
  } else {
    offt[0]     = offt[2];
    fa->offg[0] = fromnglobal;
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    if (y == 0) {ny--;}  /* includes the ghost points on the lower side */
    fromnglobal += nx*ny;
  }
  MPI_Scan(&fromnglobal,&globalrstart,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  globalrstart -= fromnglobal;
  PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] Number of unknowns owned %d\n",rank,fromnglobal);
  PetscSynchronizedFlush(PETSC_COMM_WORLD);
  /* Get fromnatural number for each node */
  PetscMalloc((fromnglobal+1)*sizeof(PetscInt),&fromnatural);
  fromnglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    if (y+ny == fa->r2g) {ny--;}  /* includes the ghost points on the upper side */
    fa->xg[1] = x; fa->yg[1] = y; fa->mg[1] = nx; fa->ng[1] = ny;
    DMDAGetGhostCorners(da2,&fa->xl[1],&fa->yl[1],0,&fa->ml[1],&fa->nl[1],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        fromnatural[fromnglobal++] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    if (y+ny == fa->r2g) {ny--;}  /* includes the ghost points on the upper side */
    fa->xg[2] = x; fa->yg[2] = y; fa->mg[2] = nx; fa->ng[2] = ny;
    DMDAGetGhostCorners(da3,&fa->xl[2],&fa->yl[2],0,&fa->ml[2],&fa->nl[2],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) fromnatural[fromnglobal++] = x + i + fa->p1*(y + j);
        else fromnatural[fromnglobal++] = fa->p2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    if (y == 0) {ny--;}  /* includes the ghost points on the lower side */
    else y--;
    fa->xg[0] = x; fa->yg[0] = y; fa->mg[0] = nx; fa->ng[0] = ny;
    DMDAGetGhostCorners(da1,&fa->xl[0],&fa->yl[0],0,&fa->ml[0],&fa->nl[0],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        fromnatural[fromnglobal++] = fa->p1*fa->r2 + x + i + fa->p1*(y + j);
      }
    }
  }
  /* PetscIntView(fromnglobal,fromnatural,PETSC_VIEWER_STDOUT_WORLD); */
  AOCreateBasic(PETSC_COMM_WORLD,fromnglobal,fromnatural,0,&globalao);
  PetscFree(fromnatural);
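
  /* Added note: toao and globalao each map this shared "natural" numbering to
     the PETSc ordering of one layout (tovec with redundancy, globalvec
     without); pushing the same natural indices through both AOs is what links
     the two layouts in the scatter built next. */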
  /* ---------------------------------------------------*/
  /* Create the scatter that updates 1 from 2 and 3 and 3 and 2 from 1 */
  /* currently handles stencil width of 1 ONLY */
  PetscMalloc(tonglobal*sizeof(PetscInt),&to);
  PetscMalloc(tonglobal*sizeof(PetscInt),&from);
  nscat = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        to[nscat] = from[nscat] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j); nscat++;
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) {
          to[nscat] = from[nscat] = x + i + fa->p1*(y + j); nscat++;
        } else {
          to[nscat] = from[nscat] = fa->p2 + x + i + fa->p1*(y + j); nscat++;
        }
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        to[nscat]     = fa->p1*fa->r2g + x + i + fa->p1*(y + j);
        from[nscat++] = fa->p1*(fa->r2 - 1) + x + i + fa->p1*(y + j);
      }
    }
  }
  AOApplicationToPetsc(toao,nscat,to);
  AOApplicationToPetsc(globalao,nscat,from);
  ISCreateGeneral(PETSC_COMM_WORLD,nscat,to,PETSC_COPY_VALUES,&tois);
  ISCreateGeneral(PETSC_COMM_WORLD,nscat,from,PETSC_COPY_VALUES,&globalis);
  PetscFree(to);
  PetscFree(from);
  VecCreateMPI(PETSC_COMM_WORLD,tonglobal,PETSC_DETERMINE,&tovec);
  VecCreateMPI(PETSC_COMM_WORLD,fromnglobal,PETSC_DETERMINE,&globalvec);
  VecScatterCreate(globalvec,globalis,tovec,tois,&vscat);
  ISDestroy(&tois);
  ISDestroy(&globalis);
  AODestroy(&globalao);
  AODestroy(&toao);
  /* fill up the global vector without redundant values with the PETSc global numbering */
  VecGetArray(globalvec,&globalarray);
  for (i=0; i<fromnglobal; i++) {
    globalarray[i] = globalrstart + i;
  }
  VecRestoreArray(globalvec,&globalarray);

  /* scatter the PETSc global indices to the redundantly valued array */
  VecScatterBegin(vscat,globalvec,tovec,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(vscat,globalvec,tovec,INSERT_VALUES,SCATTER_FORWARD);
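
  /* Added note: globalvec now holds, as scalar values, each entry's own PETSc
     global index; scattering those values into tovec tells every redundant
     (ghost) slot which global entry it mirrors. Reading the values back as
     integers below yields the final global-to-local scatter directly. */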
  /* Create the local vector that is the concatenation of the local vectors */
  nlocal = 0;
  cntl1  = cntl2 = cntl3 = 0;
  if (fa->comm[1]) {
    VecGetSize(vl2,&cntl2);
    nlocal += cntl2;
  }
  if (fa->comm[2]) {
    VecGetSize(vl3,&cntl3);
    nlocal += cntl3;
  }
  if (fa->comm[0]) {
    VecGetSize(vl1,&cntl1);
    nlocal += cntl1;
  }
  fa->offl[0] = cntl2 + cntl3;
  fa->offl[1] = 0;
  fa->offl[2] = cntl2;
  VecCreateSeq(PETSC_COMM_SELF,nlocal,&localvec);

  /* cheat so that vl1, vl2, vl3 share array memory with localvec */
  VecGetArray(localvec,&localarray);
  VecGetArray(tovec,&toarray);
  if (fa->comm[1]) {
    VecPlaceArray(vl2,localarray+fa->offl[1]);
    VecPlaceArray(vg2,toarray+offt[1]);
    DMGlobalToLocalBegin(da2,vg2,INSERT_VALUES,vl2);
    DMGlobalToLocalEnd(da2,vg2,INSERT_VALUES,vl2);
    DMRestoreGlobalVector(da2,&vg2);
  }
  if (fa->comm[2]) {
    VecPlaceArray(vl3,localarray+fa->offl[2]);
    VecPlaceArray(vg3,toarray+offt[2]);
    DMGlobalToLocalBegin(da3,vg3,INSERT_VALUES,vl3);
    DMGlobalToLocalEnd(da3,vg3,INSERT_VALUES,vl3);
    DMRestoreGlobalVector(da3,&vg3);
  }
  if (fa->comm[0]) {
    VecPlaceArray(vl1,localarray+fa->offl[0]);
    VecPlaceArray(vg1,toarray+offt[0]);
    DMGlobalToLocalBegin(da1,vg1,INSERT_VALUES,vl1);
    DMGlobalToLocalEnd(da1,vg1,INSERT_VALUES,vl1);
    DMRestoreGlobalVector(da1,&vg1);
  }
  VecRestoreArray(localvec,&localarray);
  VecRestoreArray(tovec,&toarray);
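
  /* Added note: because VecPlaceArray() pointed vl1/vl2/vl3 at slices of
     localvec's storage, the DMGlobalToLocal() calls above deposited each
     DMDA's ghosted values straight into the concatenated local vector, so
     localvec now records the PETSc global index of every local entry,
     ghost points included. */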

  /* no longer need the redundant vector and the VecScatter to it */
  VecScatterDestroy(&vscat);
  VecDestroy(&tovec);

  /* Create the final scatter that goes directly from globalvec to localvec;
     this is the one to be used in the application code */
  PetscMalloc(nlocal*sizeof(PetscInt),&indices);
  VecGetArray(localvec,&localarray);
  for (i=0; i<nlocal; i++) {
    indices[i] = (PetscInt) (localarray[i]);
  }
  VecRestoreArray(localvec,&localarray);
  ISCreateBlock(PETSC_COMM_WORLD,2,nlocal,indices,PETSC_COPY_VALUES,&is);
  PetscFree(indices);
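
  /* Added note: block size 2 in ISCreateBlock() expands each node index into
     its two interleaved (X,Y) degrees of freedom, matching the doubled sizes
     2*nlocal and 2*fromnglobal used for fa->l and fa->g below. */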
  VecCreateSeq(PETSC_COMM_SELF,2*nlocal,&fa->l);
  VecCreateMPI(PETSC_COMM_WORLD,2*fromnglobal,PETSC_DETERMINE,&fa->g);

  VecScatterCreate(fa->g,is,fa->l,PETSC_NULL,&fa->vscat);
  ISDestroy(&is);

  VecDestroy(&globalvec);
  VecDestroy(&localvec);
  if (fa->comm[0]) {
    DMRestoreLocalVector(da1,&vl1);
    DMDestroy(&da1);
  }
  if (fa->comm[1]) {
    DMRestoreLocalVector(da2,&vl2);
    DMDestroy(&da2);
  }
  if (fa->comm[2]) {
    DMRestoreLocalVector(da3,&vl3);
    DMDestroy(&da3);
  }
  *infa = fa;
  return(0);
}
/* Crude graphics to test that the ghost points are properly updated */
#include <petscdraw.h>

typedef struct {
  PetscInt    m[3],n[3];
  PetscScalar *xy[3];
} ZoomCtx;
PetscErrorCode DrawPatch(PetscDraw draw,void *ctx)
{
  ZoomCtx     *zctx = (ZoomCtx*)ctx;
  PetscInt    m,n,i,j,id,k;
  PetscReal   x1,x2,x3,x4,y_1,y2,y3,y4;
  PetscScalar *xy;

  for (k=0; k<3; k++) {
    m  = zctx->m[k];
    n  = zctx->n[k];
    xy = zctx->xy[k];

    for (j=0; j<n-1; j++) {
      for (i=0; i<m-1; i++) {
        id = i+j*m;     x1 = xy[2*id]; y_1 = xy[2*id+1];
        id = i+j*m+1;   x2 = xy[2*id]; y2  = xy[2*id+1];
        id = i+j*m+1+m; x3 = xy[2*id]; y3  = xy[2*id+1];
        id = i+j*m+m;   x4 = xy[2*id]; y4  = xy[2*id+1];
        PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
      }
    }
  }
  PetscDrawFlush(draw);
  return(0);
}
PetscErrorCode DrawFA(FA fa,Vec v)
{
  PetscScalar *va;
  ZoomCtx     zctx;
  PetscDraw   draw;
  PetscReal   xmint = 10000.0,xmaxt = -10000.0,ymint = 100000.0,ymaxt = -10000.0;
  PetscReal   xmin = xmint,xmax = xmaxt,ymin = ymint,ymax = ymaxt;
  PetscInt    i,vn,ln,j;

  VecGetArray(v,&va);
  VecGetSize(v,&vn);
  VecGetSize(fa->l,&ln);
  for (j=0; j<3; j++) {
    if (vn == ln) {
      zctx.xy[j] = va + 2*fa->offl[j];
      zctx.m[j]  = fa->ml[j];
      zctx.n[j]  = fa->nl[j];
    } else {
      zctx.xy[j] = va + 2*fa->offg[j];
      zctx.m[j]  = fa->mg[j];
      zctx.n[j]  = fa->ng[j];
    }
    for (i=0; i<zctx.m[j]*zctx.n[j]; i++) {
      if (zctx.xy[j][2*i] > xmax)   xmax = zctx.xy[j][2*i];
      if (zctx.xy[j][2*i] < xmin)   xmin = zctx.xy[j][2*i];
      if (zctx.xy[j][2*i+1] > ymax) ymax = zctx.xy[j][2*i+1];
      if (zctx.xy[j][2*i+1] < ymin) ymin = zctx.xy[j][2*i+1];
    }
  }
  MPI_Allreduce(&xmin,&xmint,1,MPI_DOUBLE,MPI_MIN,PETSC_COMM_WORLD);
  MPI_Allreduce(&xmax,&xmaxt,1,MPI_DOUBLE,MPI_MAX,PETSC_COMM_WORLD);
  MPI_Allreduce(&ymin,&ymint,1,MPI_DOUBLE,MPI_MIN,PETSC_COMM_WORLD);
  MPI_Allreduce(&ymax,&ymaxt,1,MPI_DOUBLE,MPI_MAX,PETSC_COMM_WORLD);
  xmin = xmint - .2*(xmaxt - xmint);
  xmax = xmaxt + .2*(xmaxt - xmint);
  ymin = ymint - .2*(ymaxt - ymint);
  ymax = ymaxt + .2*(ymaxt - ymint);
#if defined(PETSC_HAVE_X)
  PetscDrawOpenX(PETSC_COMM_WORLD,0,"meshes",PETSC_DECIDE,PETSC_DECIDE,700,700,&draw);
  PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
  PetscDrawZoom(draw,DrawPatch,&zctx);
  VecRestoreArray(v,&va);
  PetscDrawDestroy(&draw);
#endif
  return(0);
}
/* Crude mappings from rectangular arrays to the true geometry. These are ONLY for testing!
   They will not be used in the actual code */
PetscErrorCode FAMapRegion3(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale = R/(fa->r2-1);
  Ascale = 2.0*PETSC_PI/(3.0*(fa->p1 - fa->p2 - 1));

  FAGetGlobalCorners(fa,2,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,2,&ga);
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      ga[k][i].X = (R + k*Rscale)*PetscCosScalar(1.*PETSC_PI/6. + i*Ascale);
      ga[k][i].Y = (R + k*Rscale)*PetscSinScalar(1.*PETSC_PI/6. + i*Ascale) - 4.*R;
    }
  }
  FARestoreGlobalArray(fa,g,2,&ga);
  return(0);
}
PetscErrorCode FAMapRegion2(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale = R/(fa->r2-1);
  Ascale = 2.0*PETSC_PI/fa->p2;

  FAGetGlobalCorners(fa,1,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,1,&ga);
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      ga[k][i].X = (R + k*Rscale)*PetscCosScalar(i*Ascale - PETSC_PI/2.0);
      ga[k][i].Y = (R + k*Rscale)*PetscSinScalar(i*Ascale - PETSC_PI/2.0);
    }
  }
  FARestoreGlobalArray(fa,g,1,&ga);
  return(0);
}
PetscErrorCode FAMapRegion1(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale1,Ascale3;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale  = R/(fa->r1-1);
  Ascale1 = 2.0*PETSC_PI/fa->p2;
  Ascale3 = 2.0*PETSC_PI/(3.0*(fa->p1 - fa->p2 - 1));

  FAGetGlobalCorners(fa,0,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,0,&ga);

  /* This mapping is WRONG! Not sure how to do it correctly, so I've done a
     poor job of it. You can see that the grid connections are correct. */
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      if (i < (fa->p1-fa->p2)/2) {
        ga[k][i].X = (2.0*R + k*Rscale)*PetscCosScalar(i*Ascale3);
        ga[k][i].Y = (2.0*R + k*Rscale)*PetscSinScalar(i*Ascale3) - 4.*R;
      } else if (i > fa->p2 + (fa->p1 - fa->p2)/2) {
        ga[k][i].X = (2.0*R + k*Rscale)*PetscCosScalar(PETSC_PI+i*Ascale3);
        ga[k][i].Y = (2.0*R + k*Rscale)*PetscSinScalar(PETSC_PI+i*Ascale3) - 4.*R;
      } else {
        ga[k][i].X = (2.*R + k*Rscale)*PetscCosScalar((i-(fa->p1-fa->p2)/2)*Ascale1 - PETSC_PI/2.0);
        ga[k][i].Y = (2.*R + k*Rscale)*PetscSinScalar((i-(fa->p1-fa->p2)/2)*Ascale1 - PETSC_PI/2.0);
      }
    }
  }
  FARestoreGlobalArray(fa,g,0,&ga);
  return(0);
}
/* Simple test to check that the ghost points are properly updated */
PetscErrorCode FATest(FA fa)
{
  Vec         l,g;
  Field       **la;
  PetscInt    x,y,m,n,j,i,k,p;
  PetscMPIInt rank;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  FAGetGlobalVector(fa,&g);
  FAGetLocalVector(fa,&l);

  /* Fill the global vector one region at a time with that region's logical
     coordinates, then update the LOCAL vector; print the local vectors to
     confirm they are filled correctly */
  for (j=0; j<3; j++) {
    VecSet(g,0.0);
    FAGetGlobalCorners(fa,j,&x,&y,&m,&n);
    PetscPrintf(PETSC_COMM_WORLD,"\nFilling global region %d, showing local results \n",j+1);
    FAGetGlobalArray(fa,g,j,&la);
    for (k=y; k<y+n; k++) {
      for (i=x; i<x+m; i++) {
        la[k][i].X = i;
        la[k][i].Y = k;
      }
    }
    FARestoreGlobalArray(fa,g,j,&la);

    FAGlobalToLocal(fa,g,l);
    DrawFA(fa,g);
    DrawFA(fa,l);

    for (p=0; p<3; p++) {
      FAGetLocalCorners(fa,p,&x,&y,&m,&n);
      FAGetLocalArray(fa,l,p,&la);
      PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\n[%d] Local array for region %d \n",rank,p+1);
      for (k=y+n-1; k>=y; k--) { /* print in reverse order to match the diagram in the paper */
        for (i=x; i<x+m; i++) {
          PetscSynchronizedPrintf(PETSC_COMM_WORLD,"(%G,%G) ",la[k][i].X,la[k][i].Y);
        }
        PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\n");
      }
      FARestoreLocalArray(fa,l,p,&la);
      PetscSynchronizedFlush(PETSC_COMM_WORLD);
    }
  }
  VecDestroy(&g);
  VecDestroy(&l);
  return(0);
}
int main(int argc,char **argv)
{
  FA  fa;
  Vec g,l;

  PetscInitialize(&argc,&argv,0,help);
  FACreate(&fa);
  /* FATest(fa); */

  FAGetGlobalVector(fa,&g);
  FAGetLocalVector(fa,&l);

  FAMapRegion1(fa,g);
  FAMapRegion2(fa,g);
  FAMapRegion3(fa,g);

  FAGlobalToLocal(fa,g,l);
  DrawFA(fa,g);
  DrawFA(fa,l);

  VecDestroy(&g);
  VecDestroy(&l);
  FADestroy(&fa);

  PetscFinalize();
  return 0;
}
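
/* Added note on building and running, assuming a standard PETSc 3.3
   installation configured with X11 (the make target and process count below
   are only examples, not from the original page):

     make ex6
     mpiexec -n 3 ./ex6
*/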