Actual source code: ex6.c
petsc-3.6.4 2016-04-12

static char help[] = "Demonstrates using 3 DMDA's to manage a slightly non-trivial grid";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscdraw.h>

struct _p_FA {
  MPI_Comm   comm[3];
  PetscInt   xl[3],yl[3],ml[3],nl[3];    /* corners and sizes of local vector in DMDA */
  PetscInt   xg[3],yg[3],mg[3],ng[3];    /* corners and sizes of global vector in DMDA */
  PetscInt   offl[3],offg[3];            /* offset in local and global vector of region 1, 2 and 3 portions */
  Vec        g,l;
  VecScatter vscat;
  PetscInt   p1,p2,r1,r2,r1g,r2g,sw;
};
typedef struct _p_FA *FA;

typedef struct {
  PetscScalar X;
  PetscScalar Y;
} Field;
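
/* A Field holds the two PetscScalars stored at every node of the composite grid
   (used below as x and y coordinates), so fa->g and fa->l carry two scalar
   entries per grid node even though the setup DMDAs use one dof per node */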
PetscErrorCode FAGetLocalCorners(FA fa,PetscInt j,PetscInt *x,PetscInt *y,PetscInt *m,PetscInt *n)
{
  if (fa->comm[j]) {
    *x = fa->xl[j];
    *y = fa->yl[j];
    *m = fa->ml[j];
    *n = fa->nl[j];
  } else {
    *x = *y = *m = *n = 0;
  }
  return(0);
}

PetscErrorCode FAGetGlobalCorners(FA fa,PetscInt j,PetscInt *x,PetscInt *y,PetscInt *m,PetscInt *n)
{
  if (fa->comm[j]) {
    *x = fa->xg[j];
    *y = fa->yg[j];
    *m = fa->mg[j];
    *n = fa->ng[j];
  } else {
    *x = *y = *m = *n = 0;
  }
  return(0);
}
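
/* The array routines below return an array of row pointers, shifted so that
   f[j][i] may be indexed with the global grid indices reported by
   FAGetLocalCorners()/FAGetGlobalCorners(); each row pointer aims directly
   into the vector's storage, two scalars (one Field) per node */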
PetscErrorCode FAGetLocalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  PetscScalar *va;
  PetscInt    i;
  Field       **a;

  if (fa->comm[j]) {
    VecGetArray(v,&va);
    PetscMalloc1(fa->nl[j],&a);
    for (i=0; i<fa->nl[j]; i++) (a)[i] = (Field*) (va + 2*fa->offl[j] + i*2*fa->ml[j] - 2*fa->xl[j]);
    *f = a - fa->yl[j];
    VecRestoreArray(v,&va);
  } else {
    *f = 0;
  }
  return(0);
}

PetscErrorCode FARestoreLocalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  void *dummy;

  if (fa->comm[j]) {
    dummy = *f + fa->yl[j];
    PetscFree(dummy);
  }
  return(0);
}

PetscErrorCode FAGetGlobalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  PetscScalar *va;
  PetscInt    i;
  Field       **a;

  if (fa->comm[j]) {
    VecGetArray(v,&va);
    PetscMalloc1(fa->ng[j],&a);
    for (i=0; i<fa->ng[j]; i++) (a)[i] = (Field*) (va + 2*fa->offg[j] + i*2*fa->mg[j] - 2*fa->xg[j]);
    *f = a - fa->yg[j];
    VecRestoreArray(v,&va);
  } else {
    *f = 0;
  }
  return(0);
}

PetscErrorCode FARestoreGlobalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  void *dummy;

  if (fa->comm[j]) {
    dummy = *f + fa->yg[j];
    PetscFree(dummy);
  }
  return(0);
}

PetscErrorCode FAGetGlobalVector(FA fa,Vec *v)
{
  VecDuplicate(fa->g,v);
  return(0);
}

PetscErrorCode FAGetLocalVector(FA fa,Vec *v)
{
  VecDuplicate(fa->l,v);
  return(0);
}

PetscErrorCode FAGlobalToLocal(FA fa,Vec g,Vec l)
{
  VecScatterBegin(fa->vscat,g,l,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(fa->vscat,g,l,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}

PetscErrorCode FADestroy(FA *fa)
{
  VecDestroy(&(*fa)->g);
  VecDestroy(&(*fa)->l);
  VecScatterDestroy(&(*fa)->vscat);
  PetscFree(*fa);
  return(0);
}

PetscErrorCode FACreate(FA *infa)
{
  FA          fa;
  PetscMPIInt rank;
  PetscInt    tonglobal,globalrstart,x,nx,y,ny,*tonatural,i,j,*to,*from,offt[3];
  PetscInt    *fromnatural,fromnglobal,nscat,nlocal,cntl1,cntl2,cntl3,*indices;

  /* Each DMDA manages the local vector for the portion of region 1, 2, and 3 for that processor
     Each DMDA can belong on any subset (overlapping between DMDA's or not) of processors
     For processes on which a particular DMDA does not exist, the corresponding comm is set to zero
  */
  DM da1 = 0,da2 = 0,da3 = 0;
  /*
     vl1, vl2, vl3 represent the local vector for a single DMDA
  */
  Vec vl1 = 0,vl2 = 0,vl3 = 0, vg1 = 0, vg2 = 0,vg3 = 0;

  /*
     globalvec and friends represent the global vectors that are used for the PETSc solvers
     localvec represents the concatenation of the (up to) 3 local vectors; vl1, vl2, vl3

     tovec and friends represent intermediate vectors that are ONLY used for setting up the
     final communication patterns. Once this setup routine is complete they are destroyed.
     The tovec is like the globalvec EXCEPT it has redundant locations for the ghost points
     between regions 2+3 and 1.
  */
  AO          toao,globalao;
  IS          tois,globalis,is;
  Vec         tovec,globalvec,localvec;
  VecScatter  vscat;
  PetscScalar *globalarray,*localarray,*toarray;

  PetscNew(&fa);
  /*
    fa->sw is the stencil width

    fa->p1 is the width of region 1, fa->p2 the width of region 2 (must be the same)
    fa->r1 height of region 1
    fa->r2 height of region 2

    fa->r2 is also the height of region 3-4
    (fa->p1 - fa->p2)/2 is the width of both region 3 and region 4
  */
  fa->p1  = 24;
  fa->p2  = 15;
  fa->r1  = 6;
  fa->r2  = 6;
  fa->sw  = 1;
  fa->r1g = fa->r1 + fa->sw;
  fa->r2g = fa->r2 + fa->sw;
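
  /* r1g and r2g are the region heights grown by sw ghost rows (one here) along
     the interface where region 1 meets regions 2 and 3 */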

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  fa->comm[0] = PETSC_COMM_WORLD;
  fa->comm[1] = PETSC_COMM_WORLD;
  fa->comm[2] = PETSC_COMM_WORLD;
  /* Test case with different communicators */
  /* Normally one would use MPI_Comm routines to build MPI communicators on which you wish to partition the DMDAs */
  /*
  if (!rank) {
    fa->comm[0] = PETSC_COMM_SELF;
    fa->comm[1] = 0;
    fa->comm[2] = 0;
  } else if (rank == 1) {
    fa->comm[0] = 0;
    fa->comm[1] = PETSC_COMM_SELF;
    fa->comm[2] = 0;
  } else {
    fa->comm[0] = 0;
    fa->comm[1] = 0;
    fa->comm[2] = PETSC_COMM_SELF;
  } */

  if (fa->p2 > fa->p1 - 3) SETERRQ(PETSC_COMM_SELF,1,"Width of region fa->p2 must be at least 3 less than width of region 1");
  if (!((fa->p2 - fa->p1) % 2)) SETERRQ(PETSC_COMM_SELF,1,"Width of region 3 must NOT be divisible by 2");

  if (fa->comm[1]) {
    DMDACreate2d(fa->comm[1],DM_BOUNDARY_PERIODIC,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p2,fa->r2g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,NULL,NULL,&da2);
    DMGetLocalVector(da2,&vl2);
    DMGetGlobalVector(da2,&vg2);
  }
  if (fa->comm[2]) {
    DMDACreate2d(fa->comm[2],DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p1-fa->p2,fa->r2g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,NULL,NULL,&da3);
    DMGetLocalVector(da3,&vl3);
    DMGetGlobalVector(da3,&vg3);
  }
  if (fa->comm[0]) {
    DMDACreate2d(fa->comm[0],DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p1,fa->r1g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,NULL,NULL,&da1);
    DMGetLocalVector(da1,&vl1);
    DMGetGlobalVector(da1,&vg1);
  }

  /* count the number of unknowns owned on each processor and determine the starting point of each
     processor's ownership for the global vector with redundancy */
  tonglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] Number of unknowns owned %d\n",rank,tonglobal);
  PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

  /* Get tonatural number for each node */
  PetscMalloc1(tonglobal+1,&tonatural);
  tonglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        tonatural[tonglobal++] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) tonatural[tonglobal++] = x + i + fa->p1*(y + j);
        else tonatural[tonglobal++] = fa->p2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        tonatural[tonglobal++] = fa->p1*fa->r2g + x + i + fa->p1*(y + j);
      }
    }
  }
  /* PetscIntView(tonglobal,tonatural,PETSC_VIEWER_STDOUT_WORLD); */
  AOCreateBasic(PETSC_COMM_WORLD,tonglobal,tonatural,0,&toao);
  PetscFree(tonatural);
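
  /* toao now maps between this "natural" numbering of the redundant composite
     grid (rows of width p1, regions 2+3 below region 1) and the PETSc ordering
     induced by how the three DMDAs distribute nodes across processes */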

  /* count the number of unknowns owned on each processor and determine the starting point of each
     processor's ownership for the global vector without redundancy */
  fromnglobal = 0;
  fa->offg[1] = 0;
  offt[1]     = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    offt[2] = nx*ny;
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fromnglobal += nx*ny;
    fa->offg[2]  = fromnglobal;
  } else {
    offt[2]     = 0;
    fa->offg[2] = 0;
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    offt[0] = offt[2] + nx*ny;
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fromnglobal += nx*ny;
    fa->offg[0]  = fromnglobal;
  } else {
    offt[0]     = offt[2];
    fa->offg[0] = fromnglobal;
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    if (y == 0) ny--;    /* includes the ghost points on the lower side */
    fromnglobal += nx*ny;
  }
  MPI_Scan(&fromnglobal,&globalrstart,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  globalrstart -= fromnglobal;
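  /* MPI_Scan returns the inclusive prefix sum of the counts; subtracting this
     process's own count leaves the first global index it owns */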
  PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] Number of unknowns owned %d\n",rank,fromnglobal);
  PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

  /* Get fromnatural number for each node */
  PetscMalloc1(fromnglobal+1,&fromnatural);
  fromnglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fa->xg[1] = x; fa->yg[1] = y; fa->mg[1] = nx; fa->ng[1] = ny;
    DMDAGetGhostCorners(da2,&fa->xl[1],&fa->yl[1],0,&fa->ml[1],&fa->nl[1],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        fromnatural[fromnglobal++] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fa->xg[2] = x; fa->yg[2] = y; fa->mg[2] = nx; fa->ng[2] = ny;
    DMDAGetGhostCorners(da3,&fa->xl[2],&fa->yl[2],0,&fa->ml[2],&fa->nl[2],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) fromnatural[fromnglobal++] = x + i + fa->p1*(y + j);
        else fromnatural[fromnglobal++] = fa->p2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    if (y == 0) ny--;    /* includes the ghost points on the lower side */
    else y--;
    fa->xg[0] = x; fa->yg[0] = y; fa->mg[0] = nx; fa->ng[0] = ny;
    DMDAGetGhostCorners(da1,&fa->xl[0],&fa->yl[0],0,&fa->ml[0],&fa->nl[0],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        fromnatural[fromnglobal++] = fa->p1*fa->r2 + x + i + fa->p1*(y + j);
      }
    }
  }
  /* PetscIntView(fromnglobal,fromnatural,PETSC_VIEWER_STDOUT_WORLD); */
  AOCreateBasic(PETSC_COMM_WORLD,fromnglobal,fromnatural,0,&globalao);
  PetscFree(fromnatural);
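
  /* globalao is the analogous mapping for the non-redundant numbering, in which
     the interface row shared by region 1 and regions 2+3 is counted only once */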

  /* ---------------------------------------------------*/
  /* Create the scatter that updates region 1 from regions 2 and 3, and regions 2 and 3 from region 1 */
  /* currently handles stencil width of 1 ONLY */
  PetscMalloc1(tonglobal,&to);
  PetscMalloc1(tonglobal,&from);
  nscat = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        to[nscat] = from[nscat] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j); nscat++;
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) {
          to[nscat] = from[nscat] = x + i + fa->p1*(y + j); nscat++;
        } else {
          to[nscat] = from[nscat] = fa->p2 + x + i + fa->p1*(y + j); nscat++;
        }
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        to[nscat]     = fa->p1*fa->r2g + x + i + fa->p1*(y + j);
        from[nscat++] = fa->p1*(fa->r2 - 1) + x + i + fa->p1*(y + j);
      }
    }
  }
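
  /* The to[] indices live in the redundant ("to") natural numbering, the from[]
     indices in the non-redundant global numbering: region 1's bottom ghost row
     draws from the top interior row of regions 2 and 3, and the top ghost row of
     regions 2 and 3 draws from the first interior row of region 1 */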
  AOApplicationToPetsc(toao,nscat,to);
  AOApplicationToPetsc(globalao,nscat,from);
  ISCreateGeneral(PETSC_COMM_WORLD,nscat,to,PETSC_COPY_VALUES,&tois);
  ISCreateGeneral(PETSC_COMM_WORLD,nscat,from,PETSC_COPY_VALUES,&globalis);
  PetscFree(to);
  PetscFree(from);
  VecCreateMPI(PETSC_COMM_WORLD,tonglobal,PETSC_DETERMINE,&tovec);
  VecCreateMPI(PETSC_COMM_WORLD,fromnglobal,PETSC_DETERMINE,&globalvec);
  VecScatterCreate(globalvec,globalis,tovec,tois,&vscat);
  ISDestroy(&tois);
  ISDestroy(&globalis);
  AODestroy(&globalao);
  AODestroy(&toao);

  /* fill up global vector without redundant values with PETSc global numbering */
  VecGetArray(globalvec,&globalarray);
  for (i=0; i<fromnglobal; i++) {
    globalarray[i] = globalrstart + i;
  }
  VecRestoreArray(globalvec,&globalarray);

  /* scatter PETSc global indices to the redundant-valued array */
  VecScatterBegin(vscat,globalvec,tovec,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(vscat,globalvec,tovec,INSERT_VALUES,SCATTER_FORWARD);
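
  /* each entry of tovec now holds, as a floating point value, the PETSc global
     index of the non-redundant unknown it duplicates; these values are read back
     below to build the final globalvec-to-localvec scatter */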

  /* Create local vector that is the concatenation of the local vectors */
  nlocal = 0;
  cntl1  = cntl2 = cntl3 = 0;
  if (fa->comm[1]) {
    VecGetSize(vl2,&cntl2);
    nlocal += cntl2;
  }
  if (fa->comm[2]) {
    VecGetSize(vl3,&cntl3);
    nlocal += cntl3;
  }
  if (fa->comm[0]) {
    VecGetSize(vl1,&cntl1);
    nlocal += cntl1;
  }
  fa->offl[0] = cntl2 + cntl3;
  fa->offl[1] = 0;
  fa->offl[2] = cntl2;
  VecCreateSeq(PETSC_COMM_SELF,nlocal,&localvec);

  /* cheat so that vl1, vl2, vl3 share array memory with localvec */
  VecGetArray(localvec,&localarray);
  VecGetArray(tovec,&toarray);
  if (fa->comm[1]) {
    VecPlaceArray(vl2,localarray+fa->offl[1]);
    VecPlaceArray(vg2,toarray+offt[1]);
    DMGlobalToLocalBegin(da2,vg2,INSERT_VALUES,vl2);
    DMGlobalToLocalEnd(da2,vg2,INSERT_VALUES,vl2);
    DMRestoreGlobalVector(da2,&vg2);
  }
  if (fa->comm[2]) {
    VecPlaceArray(vl3,localarray+fa->offl[2]);
    VecPlaceArray(vg3,toarray+offt[2]);
    DMGlobalToLocalBegin(da3,vg3,INSERT_VALUES,vl3);
    DMGlobalToLocalEnd(da3,vg3,INSERT_VALUES,vl3);
    DMRestoreGlobalVector(da3,&vg3);
  }
  if (fa->comm[0]) {
    VecPlaceArray(vl1,localarray+fa->offl[0]);
    VecPlaceArray(vg1,toarray+offt[0]);
    DMGlobalToLocalBegin(da1,vg1,INSERT_VALUES,vl1);
    DMGlobalToLocalEnd(da1,vg1,INSERT_VALUES,vl1);
    DMRestoreGlobalVector(da1,&vg1);
  }
  VecRestoreArray(localvec,&localarray);
  VecRestoreArray(tovec,&toarray);

  /* no longer need the redundant vector and VecScatter to it */
  VecScatterDestroy(&vscat);
  VecDestroy(&tovec);

  /* Create final scatter that goes directly from globalvec to localvec */
  /* this is the one to be used in the application code */
  PetscMalloc1(nlocal,&indices);
  VecGetArray(localvec,&localarray);
  for (i=0; i<nlocal; i++) {
    indices[i] = (PetscInt) (localarray[i]);
  }
  VecRestoreArray(localvec,&localarray);
  ISCreateBlock(PETSC_COMM_WORLD,2,nlocal,indices,PETSC_COPY_VALUES,&is);
  PetscFree(indices);
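
  /* is lists, for every node of localvec, the global block it comes from;
     block size 2 accounts for the two scalars (one Field) stored per node */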

  VecCreateSeq(PETSC_COMM_SELF,2*nlocal,&fa->l);
  VecCreateMPI(PETSC_COMM_WORLD,2*fromnglobal,PETSC_DETERMINE,&fa->g);

  VecScatterCreate(fa->g,is,fa->l,NULL,&fa->vscat);
  ISDestroy(&is);

  VecDestroy(&globalvec);
  VecDestroy(&localvec);
  if (fa->comm[0]) {
    DMRestoreLocalVector(da1,&vl1);
    DMDestroy(&da1);
  }
  if (fa->comm[1]) {
    DMRestoreLocalVector(da2,&vl2);
    DMDestroy(&da2);
  }
  if (fa->comm[2]) {
    DMRestoreLocalVector(da3,&vl3);
    DMDestroy(&da3);
  }
  *infa = fa;
  return(0);
}

/* Crude graphics to test that the ghost points are properly updated */
#include <petscdraw.h>

typedef struct {
  PetscInt    m[3],n[3];
  PetscScalar *xy[3];
} ZoomCtx;
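
/* Draw the quadrilateral for every cell of each region, using the (x,y)
   coordinates stored two-per-node in the vector */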
PetscErrorCode DrawPatch(PetscDraw draw,void *ctx)
{
  ZoomCtx     *zctx = (ZoomCtx*)ctx;
  PetscInt    m,n,i,j,id,k;
  PetscReal   x1,x2,x3,x4,y_1,y2,y3,y4;
  PetscScalar *xy;

  for (k=0; k<3; k++) {
    m  = zctx->m[k];
    n  = zctx->n[k];
    xy = zctx->xy[k];

    for (j=0; j<n-1; j++) {
      for (i=0; i<m-1; i++) {
        id = i+j*m;     x1 = xy[2*id]; y_1 = xy[2*id+1];
        id = i+j*m+1;   x2 = xy[2*id]; y2  = xy[2*id+1];
        id = i+j*m+1+m; x3 = xy[2*id]; y3  = xy[2*id+1];
        id = i+j*m+m;   x4 = xy[2*id]; y4  = xy[2*id+1];
        PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
      }
    }
  }
  PetscDrawFlush(draw);
  return(0);
}

PetscErrorCode DrawFA(FA fa,Vec v)
{
  PetscScalar *va;
  ZoomCtx     zctx;
  PetscReal   xmint,xmaxt,ymint,ymaxt;
  PetscReal   xmin = 10000.0,xmax = -10000.0,ymin = 10000.0,ymax = -10000.0;
  PetscInt    i,vn,ln,j;

  VecGetArray(v,&va);
  VecGetSize(v,&vn);
  VecGetSize(fa->l,&ln);
  for (j=0; j<3; j++) {
    if (vn == ln) {
      zctx.xy[j] = va + 2*fa->offl[j];
      zctx.m[j]  = fa->ml[j];
      zctx.n[j]  = fa->nl[j];
    } else {
      zctx.xy[j] = va + 2*fa->offg[j];
      zctx.m[j]  = fa->mg[j];
      zctx.n[j]  = fa->ng[j];
    }
    for (i=0; i<zctx.m[j]*zctx.n[j]; i++) {
      if (zctx.xy[j][2*i] > xmax) xmax = zctx.xy[j][2*i];
      if (zctx.xy[j][2*i] < xmin) xmin = zctx.xy[j][2*i];
      if (zctx.xy[j][2*i+1] > ymax) ymax = zctx.xy[j][2*i+1];
      if (zctx.xy[j][2*i+1] < ymin) ymin = zctx.xy[j][2*i+1];
    }
  }
  MPI_Allreduce(&xmin,&xmint,1,MPIU_REAL,MPI_MIN,PETSC_COMM_WORLD);
  MPI_Allreduce(&xmax,&xmaxt,1,MPIU_REAL,MPI_MAX,PETSC_COMM_WORLD);
  MPI_Allreduce(&ymin,&ymint,1,MPIU_REAL,MPI_MIN,PETSC_COMM_WORLD);
  MPI_Allreduce(&ymax,&ymaxt,1,MPIU_REAL,MPI_MAX,PETSC_COMM_WORLD);
  xmin = xmint - .2*(xmaxt - xmint);
  xmax = xmaxt + .2*(xmaxt - xmint);
  ymin = ymint - .2*(ymaxt - ymint);
  ymax = ymaxt + .2*(ymaxt - ymint);
#if defined(PETSC_HAVE_X) || defined(PETSC_HAVE_OPENGL)
  {
    PetscDraw draw;

    PetscDrawCreate(PETSC_COMM_WORLD,0,"meshes",PETSC_DECIDE,PETSC_DECIDE,700,700,&draw);
    PetscDrawSetFromOptions(draw);
    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
    PetscDrawZoom(draw,DrawPatch,&zctx);
    PetscDrawDestroy(&draw);
  }
#endif
  VecRestoreArray(v,&va);
  return(0);
}

/* Crude mappings from rectangular arrays to the true geometry. These are ONLY for testing!
   They will not be used in the actual code */
PetscErrorCode FAMapRegion3(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale = R/(fa->r2-1);
  Ascale = 2.0*PETSC_PI/(3.0*(fa->p1 - fa->p2 - 1));

  FAGetGlobalCorners(fa,2,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,2,&ga);
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      ga[k][i].X = (R + k*Rscale)*PetscCosScalar(1.*PETSC_PI/6. + i*Ascale);
      ga[k][i].Y = (R + k*Rscale)*PetscSinScalar(1.*PETSC_PI/6. + i*Ascale) - 4.*R;
    }
  }
  FARestoreGlobalArray(fa,g,2,&ga);
  return(0);
}

PetscErrorCode FAMapRegion2(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale = R/(fa->r2-1);
  Ascale = 2.0*PETSC_PI/fa->p2;

  FAGetGlobalCorners(fa,1,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,1,&ga);
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      ga[k][i].X = (R + k*Rscale)*PetscCosScalar(i*Ascale - PETSC_PI/2.0);
      ga[k][i].Y = (R + k*Rscale)*PetscSinScalar(i*Ascale - PETSC_PI/2.0);
    }
  }
  FARestoreGlobalArray(fa,g,1,&ga);
  return(0);
}

PetscErrorCode FAMapRegion1(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale1,Ascale3;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale  = R/(fa->r1-1);
  Ascale1 = 2.0*PETSC_PI/fa->p2;
  Ascale3 = 2.0*PETSC_PI/(3.0*(fa->p1 - fa->p2 - 1));

  FAGetGlobalCorners(fa,0,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,0,&ga);

  /* This mapping is WRONG! Not sure how to do it so I've done a poor job of
     it. You can see that the grid connections are correct. */
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      if (i < (fa->p1-fa->p2)/2) {
        ga[k][i].X = (2.0*R + k*Rscale)*PetscCosScalar(i*Ascale3);
        ga[k][i].Y = (2.0*R + k*Rscale)*PetscSinScalar(i*Ascale3) - 4.*R;
      } else if (i > fa->p2 + (fa->p1 - fa->p2)/2) {
        ga[k][i].X = (2.0*R + k*Rscale)*PetscCosScalar(PETSC_PI+i*Ascale3);
        ga[k][i].Y = (2.0*R + k*Rscale)*PetscSinScalar(PETSC_PI+i*Ascale3) - 4.*R;
      } else {
        ga[k][i].X = (2.*R + k*Rscale)*PetscCosScalar((i-(fa->p1-fa->p2)/2)*Ascale1 - PETSC_PI/2.0);
        ga[k][i].Y = (2.*R + k*Rscale)*PetscSinScalar((i-(fa->p1-fa->p2)/2)*Ascale1 - PETSC_PI/2.0);
      }
    }
  }
  FARestoreGlobalArray(fa,g,0,&ga);
  return(0);
}

/* Simple test to check that the ghost points are properly updated */
PetscErrorCode FATest(FA fa)
{
  Vec         l,g;
  Field       **la;
  PetscInt    x,y,m,n,j,i,k,p;
  PetscMPIInt rank;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  FAGetGlobalVector(fa,&g);
  FAGetLocalVector(fa,&l);

  /* fill up global vector of one region at a time with ITS logical coordinates, then update LOCAL
     vector; print local vectors to confirm they are correctly filled */
  for (j=0; j<3; j++) {
    VecSet(g,0.0);
    FAGetGlobalCorners(fa,j,&x,&y,&m,&n);
    PetscPrintf(PETSC_COMM_WORLD,"\nFilling global region %d, showing local results \n",j+1);
    FAGetGlobalArray(fa,g,j,&la);
    for (k=y; k<y+n; k++) {
      for (i=x; i<x+m; i++) {
        la[k][i].X = i;
        la[k][i].Y = k;
      }
    }
    FARestoreGlobalArray(fa,g,j,&la);

    FAGlobalToLocal(fa,g,l);
    DrawFA(fa,g);
    DrawFA(fa,l);

    for (p=0; p<3; p++) {
      FAGetLocalCorners(fa,p,&x,&y,&m,&n);
      FAGetLocalArray(fa,l,p,&la);
      PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\n[%d] Local array for region %d \n",rank,p+1);
      for (k=y+n-1; k>=y; k--) {    /* print in reverse order to match diagram in paper */
        for (i=x; i<x+m; i++) {
          PetscSynchronizedPrintf(PETSC_COMM_WORLD,"(%g,%g) ",(double)la[k][i].X,(double)la[k][i].Y);
        }
        PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\n");
      }
      FARestoreLocalArray(fa,l,p,&la);
      PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
    }
  }
  VecDestroy(&g);
  VecDestroy(&l);
  return(0);
}

int main(int argc,char **argv)
{
  FA  fa;
  Vec g,l;

  PetscInitialize(&argc,&argv,0,help);

  FACreate(&fa);
  /* FATest(fa); */

  FAGetGlobalVector(fa,&g);
  FAGetLocalVector(fa,&l);

  FAMapRegion1(fa,g);
  FAMapRegion2(fa,g);
  FAMapRegion3(fa,g);

  FAGlobalToLocal(fa,g,l);
  DrawFA(fa,g);
  DrawFA(fa,l);

  VecDestroy(&g);
  VecDestroy(&l);
  FADestroy(&fa);

  PetscFinalize();
  return 0;
}