Actual source code: ex6.c
petsc-3.4.5 2014-06-29

static char help[] = "Demonstrates using 3 DMDAs to manage a slightly non-trivial grid";

#include <petscdmda.h>
#include <petscdraw.h>

struct _p_FA {
  MPI_Comm   comm[3];
  PetscInt   xl[3],yl[3],ml[3],nl[3];    /* corners and sizes of local vector in DMDA */
  PetscInt   xg[3],yg[3],mg[3],ng[3];    /* corners and sizes of global vector in DMDA */
  PetscInt   offl[3],offg[3];            /* offset in local and global vector of region 1, 2 and 3 portions */
  Vec        g,l;
  VecScatter vscat;
  PetscInt   p1,p2,r1,r2,r1g,r2g,sw;
};
typedef struct _p_FA *FA;
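
/*
   A sketch of the composite grid, inferred from the natural numbering used in
   FACreate() below: regions 2 and 3 together tile a strip of width p1 and
   height r2g, with region 2 (width p2, periodic in x) in the middle and
   region 3 supplying the two outer pieces; region 1 (width p1, height r1g)
   sits above this strip and shares a row of ghost points with regions 2 and 3.
*/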

typedef struct {
  PetscScalar X;
  PetscScalar Y;
} Field;
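
/*
   Each grid node carries one Field, i.e. two PetscScalars; in this example
   they hold the (X,Y) coordinates of the node, which is what the crude
   graphics routines below draw.
*/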

PetscErrorCode FAGetLocalCorners(FA fa,PetscInt j,PetscInt *x,PetscInt *y,PetscInt *m,PetscInt *n)
{
  if (fa->comm[j]) {
    *x = fa->xl[j];
    *y = fa->yl[j];
    *m = fa->ml[j];
    *n = fa->nl[j];
  } else {
    *x = *y = *m = *n = 0;
  }
  return(0);
}

PetscErrorCode FAGetGlobalCorners(FA fa,PetscInt j,PetscInt *x,PetscInt *y,PetscInt *m,PetscInt *n)
{
  if (fa->comm[j]) {
    *x = fa->xg[j];
    *y = fa->yg[j];
    *m = fa->mg[j];
    *n = fa->ng[j];
  } else {
    *x = *y = *m = *n = 0;
  }
  return(0);
}

PetscErrorCode FAGetLocalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  PetscScalar *va;
  PetscInt    i;
  Field       **a;

  if (fa->comm[j]) {
    VecGetArray(v,&va);
    PetscMalloc(fa->nl[j]*sizeof(Field*),&a);
    for (i=0; i<fa->nl[j]; i++) a[i] = (Field*)(va + 2*fa->offl[j] + i*2*fa->ml[j] - 2*fa->xl[j]);
    *f = a - fa->yl[j];
    VecRestoreArray(v,&va);
  } else {
    *f = 0;
  }
  return(0);
}
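
/*
   Note the indexing trick above: each row pointer a[i] is shifted back by
   2*fa->xl[j] scalars (a Field is two PetscScalars) and the returned base is
   shifted back by fa->yl[j] rows, so callers index the array with the DMDA's
   own coordinates, (*f)[y][x] with xl <= x < xl+ml and yl <= y < yl+nl,
   rather than with zero-based offsets.
*/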

PetscErrorCode FARestoreLocalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  void *dummy;

  if (fa->comm[j]) {
    dummy = *f + fa->yl[j];
    PetscFree(dummy);
  }
  return(0);
}

PetscErrorCode FAGetGlobalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  PetscScalar *va;
  PetscInt    i;
  Field       **a;

  if (fa->comm[j]) {
    VecGetArray(v,&va);
    PetscMalloc(fa->ng[j]*sizeof(Field*),&a);
    for (i=0; i<fa->ng[j]; i++) a[i] = (Field*)(va + 2*fa->offg[j] + i*2*fa->mg[j] - 2*fa->xg[j]);
    *f = a - fa->yg[j];
    VecRestoreArray(v,&va);
  } else {
    *f = 0;
  }
  return(0);
}

PetscErrorCode FARestoreGlobalArray(FA fa,Vec v,PetscInt j,Field ***f)
{
  void *dummy;

  if (fa->comm[j]) {
    dummy = *f + fa->yg[j];
    PetscFree(dummy);
  }
  return(0);
}

PetscErrorCode FAGetGlobalVector(FA fa,Vec *v)
{
  VecDuplicate(fa->g,v);
  return(0);
}

PetscErrorCode FAGetLocalVector(FA fa,Vec *v)
{
  VecDuplicate(fa->l,v);
  return(0);
}

PetscErrorCode FAGlobalToLocal(FA fa,Vec g,Vec l)
{
  VecScatterBegin(fa->vscat,g,l,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(fa->vscat,g,l,INSERT_VALUES,SCATTER_FORWARD);
  return(0);
}
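
/*
   These routines are used like their DM counterparts (see FATest() and main()
   below): get work vectors with FAGetGlobalVector()/FAGetLocalVector(), fill
   the global vector through FAGetGlobalArray(), call FAGlobalToLocal() to
   update the local vector including its ghost points, then read the result
   through FAGetLocalArray().
*/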

PetscErrorCode FADestroy(FA *fa)
{
  VecDestroy(&(*fa)->g);
  VecDestroy(&(*fa)->l);
  VecScatterDestroy(&(*fa)->vscat);
  PetscFree(*fa);
  return(0);
}

PetscErrorCode FACreate(FA *infa)
{
  FA          fa;
  PetscMPIInt rank;
  PetscInt    tonglobal,globalrstart,x,nx,y,ny,*tonatural,i,j,*to,*from,offt[3];
  PetscInt    *fromnatural,fromnglobal,nscat,nlocal,cntl1,cntl2,cntl3,*indices;

  /* Each DMDA manages the local vector for the portion of regions 1, 2, and 3 owned by that processor.
     Each DMDA can live on any subset (overlapping between DMDAs or not) of the processors.
     On processes where a particular DMDA does not exist, the corresponding comm is set to zero. */
  DM da1 = 0,da2 = 0,da3 = 0;
  /*
     vl1, vl2, vl3 (and vg1, vg2, vg3) are the local (and global) vectors of the individual DMDAs
  */
  Vec vl1 = 0,vl2 = 0,vl3 = 0, vg1 = 0, vg2 = 0,vg3 = 0;

  /*
     globalvec and friends represent the global vectors that are used for the PETSc solvers;
     localvec represents the concatenation of the (up to) 3 local vectors vl1, vl2, vl3.

     tovec and friends are intermediate vectors that are ONLY used for setting up the
     final communication patterns; once this setup routine is complete they are destroyed.
     tovec is like globalvec EXCEPT it has redundant locations for the ghost points
     between regions 2+3 and 1.
  */
  AO          toao,globalao;
  IS          tois,globalis,is;
  Vec         tovec,globalvec,localvec;
  VecScatter  vscat;
  PetscScalar *globalarray,*localarray,*toarray;

  PetscNew(struct _p_FA,&fa);
  /*
     fa->sw is the stencil width

     fa->p1 is the width of region 1, fa->p2 the width of region 2 (must be the same)
     fa->r1 is the height of region 1
     fa->r2 is the height of region 2; it is also the height of regions 3 and 4
     (fa->p1 - fa->p2)/2 is the width of both region 3 and region 4
  */
  fa->p1  = 24;
  fa->p2  = 15;
  fa->r1  = 6;
  fa->r2  = 6;
  fa->sw  = 1;
  fa->r1g = fa->r1 + fa->sw;
  fa->r2g = fa->r2 + fa->sw;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  fa->comm[0] = PETSC_COMM_WORLD;
  fa->comm[1] = PETSC_COMM_WORLD;
  fa->comm[2] = PETSC_COMM_WORLD;
  /* Test case with different communicators; normally one would use MPI_Comm routines
     to build the MPI communicators on which you wish to partition the DMDAs: */
  /*
  if (!rank) {
    fa->comm[0] = PETSC_COMM_SELF;
    fa->comm[1] = 0;
    fa->comm[2] = 0;
  } else if (rank == 1) {
    fa->comm[0] = 0;
    fa->comm[1] = PETSC_COMM_SELF;
    fa->comm[2] = 0;
  } else {
    fa->comm[0] = 0;
    fa->comm[1] = 0;
    fa->comm[2] = PETSC_COMM_SELF;
  }
  */
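
  /*
     A minimal sketch (illustration only, not used in this example) of building
     such communicators with MPI_Comm_split(): ranks that pass the same color
     end up grouped in the same communicator, so

         MPI_Comm subcomm;
         MPI_Comm_split(PETSC_COMM_WORLD,rank % 3,rank,&subcomm);

     would split PETSC_COMM_WORLD into three disjoint communicators, one per region.
  */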

  if (fa->p2 > fa->p1 - 3) SETERRQ(PETSC_COMM_SELF,1,"Width of region fa->p2 must be at least 3 less than width of region 1");
  if (!((fa->p2 - fa->p1) % 2)) SETERRQ(PETSC_COMM_SELF,1,"Width of region 3 must NOT be divisible by 2");

  if (fa->comm[1]) {
    DMDACreate2d(fa->comm[1],DMDA_BOUNDARY_PERIODIC,DMDA_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p2,fa->r2g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,NULL,NULL,&da2);
    DMGetLocalVector(da2,&vl2);
    DMGetGlobalVector(da2,&vg2);
  }
  if (fa->comm[2]) {
    DMDACreate2d(fa->comm[2],DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p1-fa->p2,fa->r2g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,NULL,NULL,&da3);
    DMGetLocalVector(da3,&vl3);
    DMGetGlobalVector(da3,&vg3);
  }
  if (fa->comm[0]) {
    DMDACreate2d(fa->comm[0],DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_BOX,fa->p1,fa->r1g,PETSC_DECIDE,PETSC_DECIDE,1,fa->sw,NULL,NULL,&da1);
    DMGetLocalVector(da1,&vl1);
    DMGetGlobalVector(da1,&vg1);
  }
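
  /* Note that only region 2 (da2) is periodic, in x; regions 1 and 3 are
     non-periodic, and all three DMDAs use one degree of freedom per node and
     the same stencil width fa->sw. */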

  /* Count the number of unknowns owned on each processor and determine the starting point of each processor's ownership
     for the global vector with redundancy */
  tonglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    tonglobal += nx*ny;
  }
  PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] Number of unknowns owned %d\n",rank,tonglobal);
  PetscSynchronizedFlush(PETSC_COMM_WORLD);

  /* Get tonatural number for each node */
  PetscMalloc((tonglobal+1)*sizeof(PetscInt),&tonatural);
  tonglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        tonatural[tonglobal++] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) tonatural[tonglobal++] = x + i + fa->p1*(y + j);
        else tonatural[tonglobal++] = fa->p2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        tonatural[tonglobal++] = fa->p1*fa->r2g + x + i + fa->p1*(y + j);
      }
    }
  }
  /* PetscIntView(tonglobal,tonatural,PETSC_VIEWER_STDOUT_WORLD); */
  AOCreateBasic(PETSC_COMM_WORLD,tonglobal,tonatural,0,&toao);
  PetscFree(tonatural);
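
  /* The AO converts between this natural numbering (a single row-major
     numbering of the composite grid, here including the redundant ghost rows
     between the regions) and the PETSc ordering implied by the per-process
     ownership counted above. */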

  /* Count the number of unknowns owned on each processor and determine the starting point of each processor's ownership
     for the global vector without redundancy */
  fromnglobal = 0;
  fa->offg[1] = 0;
  offt[1]     = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    offt[2] = nx*ny;
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fromnglobal += nx*ny;
    fa->offg[2]  = fromnglobal;
  } else {
    offt[2]     = 0;
    fa->offg[2] = 0;
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    offt[0] = offt[2] + nx*ny;
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fromnglobal += nx*ny;
    fa->offg[0]  = fromnglobal;
  } else {
    offt[0]     = offt[2];
    fa->offg[0] = fromnglobal;
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    if (y == 0) ny--;    /* includes the ghost points on the lower side */
    fromnglobal += nx*ny;
  }
  MPI_Scan(&fromnglobal,&globalrstart,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
  globalrstart -= fromnglobal;
  PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] Number of unknowns owned %d\n",rank,fromnglobal);
  PetscSynchronizedFlush(PETSC_COMM_WORLD);

  /* Get fromnatural number for each node */
  PetscMalloc((fromnglobal+1)*sizeof(PetscInt),&fromnatural);
  fromnglobal = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fa->xg[1] = x; fa->yg[1] = y; fa->mg[1] = nx; fa->ng[1] = ny;
    DMDAGetGhostCorners(da2,&fa->xl[1],&fa->yl[1],0,&fa->ml[1],&fa->nl[1],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        fromnatural[fromnglobal++] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    if (y+ny == fa->r2g) ny--;    /* includes the ghost points on the upper side */
    fa->xg[2] = x; fa->yg[2] = y; fa->mg[2] = nx; fa->ng[2] = ny;
    DMDAGetGhostCorners(da3,&fa->xl[2],&fa->yl[2],0,&fa->ml[2],&fa->nl[2],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) fromnatural[fromnglobal++] = x + i + fa->p1*(y + j);
        else fromnatural[fromnglobal++] = fa->p2 + x + i + fa->p1*(y + j);
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    if (y == 0) ny--;    /* includes the ghost points on the lower side */
    else y--;
    fa->xg[0] = x; fa->yg[0] = y; fa->mg[0] = nx; fa->ng[0] = ny;
    DMDAGetGhostCorners(da1,&fa->xl[0],&fa->yl[0],0,&fa->ml[0],&fa->nl[0],0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        fromnatural[fromnglobal++] = fa->p1*fa->r2 + x + i + fa->p1*(y + j);
      }
    }
  }
  /* PetscIntView(fromnglobal,fromnatural,PETSC_VIEWER_STDOUT_WORLD); */
  AOCreateBasic(PETSC_COMM_WORLD,fromnglobal,fromnatural,0,&globalao);
  PetscFree(fromnatural);

  /* ---------------------------------------------------*/
  /* Create the scatter that updates 1 from 2 and 3, and 2 and 3 from 1 */
  /* currently handles stencil width of 1 ONLY */
  PetscMalloc(tonglobal*sizeof(PetscInt),&to);
  PetscMalloc(tonglobal*sizeof(PetscInt),&from);
  nscat = 0;
  if (fa->comm[1]) {
    DMDAGetCorners(da2,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        to[nscat] = from[nscat] = (fa->p1 - fa->p2)/2 + x + i + fa->p1*(y + j); nscat++;
      }
    }
  }
  if (fa->comm[2]) {
    DMDAGetCorners(da3,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        if (x + i < (fa->p1 - fa->p2)/2) {
          to[nscat] = from[nscat] = x + i + fa->p1*(y + j); nscat++;
        } else {
          to[nscat] = from[nscat] = fa->p2 + x + i + fa->p1*(y + j); nscat++;
        }
      }
    }
  }
  if (fa->comm[0]) {
    DMDAGetCorners(da1,&x,&y,0,&nx,&ny,0);
    for (j=0; j<ny; j++) {
      for (i=0; i<nx; i++) {
        to[nscat]     = fa->p1*fa->r2g + x + i + fa->p1*(y + j);
        from[nscat++] = fa->p1*(fa->r2 - 1) + x + i + fa->p1*(y + j);
      }
    }
  }
  AOApplicationToPetsc(toao,nscat,to);
  AOApplicationToPetsc(globalao,nscat,from);
  ISCreateGeneral(PETSC_COMM_WORLD,nscat,to,PETSC_COPY_VALUES,&tois);
  ISCreateGeneral(PETSC_COMM_WORLD,nscat,from,PETSC_COPY_VALUES,&globalis);
  PetscFree(to);
  PetscFree(from);
  VecCreateMPI(PETSC_COMM_WORLD,tonglobal,PETSC_DETERMINE,&tovec);
  VecCreateMPI(PETSC_COMM_WORLD,fromnglobal,PETSC_DETERMINE,&globalvec);
  VecScatterCreate(globalvec,globalis,tovec,tois,&vscat);
  ISDestroy(&tois);
  ISDestroy(&globalis);
  AODestroy(&globalao);
  AODestroy(&toao);

  /* fill up the global vector without redundant values with the PETSc global numbering */
  VecGetArray(globalvec,&globalarray);
  for (i=0; i<fromnglobal; i++) {
    globalarray[i] = globalrstart + i;
  }
  VecRestoreArray(globalvec,&globalarray);

  /* scatter the PETSc global indices to the redundantly valued array */
  VecScatterBegin(vscat,globalvec,tovec,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(vscat,globalvec,tovec,INSERT_VALUES,SCATTER_FORWARD);

  /* Create the local vector that is the concatenation of the local vectors */
  nlocal = 0;
  cntl1  = cntl2 = cntl3 = 0;
  if (fa->comm[1]) {
    VecGetSize(vl2,&cntl2);
    nlocal += cntl2;
  }
  if (fa->comm[2]) {
    VecGetSize(vl3,&cntl3);
    nlocal += cntl3;
  }
  if (fa->comm[0]) {
    VecGetSize(vl1,&cntl1);
    nlocal += cntl1;
  }
  fa->offl[0] = cntl2 + cntl3;
  fa->offl[1] = 0;
  fa->offl[2] = cntl2;
  VecCreateSeq(PETSC_COMM_SELF,nlocal,&localvec);

  /* cheat so that vl1, vl2, vl3 share array memory with localvec */
  VecGetArray(localvec,&localarray);
  VecGetArray(tovec,&toarray);
  if (fa->comm[1]) {
    VecPlaceArray(vl2,localarray+fa->offl[1]);
    VecPlaceArray(vg2,toarray+offt[1]);
    DMGlobalToLocalBegin(da2,vg2,INSERT_VALUES,vl2);
    DMGlobalToLocalEnd(da2,vg2,INSERT_VALUES,vl2);
    DMRestoreGlobalVector(da2,&vg2);
  }
  if (fa->comm[2]) {
    VecPlaceArray(vl3,localarray+fa->offl[2]);
    VecPlaceArray(vg3,toarray+offt[2]);
    DMGlobalToLocalBegin(da3,vg3,INSERT_VALUES,vl3);
    DMGlobalToLocalEnd(da3,vg3,INSERT_VALUES,vl3);
    DMRestoreGlobalVector(da3,&vg3);
  }
  if (fa->comm[0]) {
    VecPlaceArray(vl1,localarray+fa->offl[0]);
    VecPlaceArray(vg1,toarray+offt[0]);
    DMGlobalToLocalBegin(da1,vg1,INSERT_VALUES,vl1);
    DMGlobalToLocalEnd(da1,vg1,INSERT_VALUES,vl1);
    DMRestoreGlobalVector(da1,&vg1);
  }
  VecRestoreArray(localvec,&localarray);
  VecRestoreArray(tovec,&toarray);
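
  /* At this point localvec holds, at every local location (including the
     ghost points), the PETSc global index of the corresponding entry of
     globalvec: the indices were written into globalvec above, scattered into
     the redundant tovec, and then ghost-updated into the per-region local
     vectors that alias localvec. These values are read back below to build
     the index set for the final scatter. */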

  /* no longer need the redundant vector and the VecScatter to it */
  VecScatterDestroy(&vscat);
  VecDestroy(&tovec);

  /* Create the final scatter that goes directly from globalvec to localvec;
     this is the one to be used in the application code */
  PetscMalloc(nlocal*sizeof(PetscInt),&indices);
  VecGetArray(localvec,&localarray);
  for (i=0; i<nlocal; i++) {
    indices[i] = (PetscInt)(localarray[i]);
  }
  VecRestoreArray(localvec,&localarray);
  ISCreateBlock(PETSC_COMM_WORLD,2,nlocal,indices,PETSC_COPY_VALUES,&is);
  PetscFree(indices);

  VecCreateSeq(PETSC_COMM_SELF,2*nlocal,&fa->l);
  VecCreateMPI(PETSC_COMM_WORLD,2*fromnglobal,PETSC_DETERMINE,&fa->g);

  VecScatterCreate(fa->g,is,fa->l,NULL,&fa->vscat);
  ISDestroy(&is);

  VecDestroy(&globalvec);
  VecDestroy(&localvec);
  if (fa->comm[0]) {
    DMRestoreLocalVector(da1,&vl1);
    DMDestroy(&da1);
  }
  if (fa->comm[1]) {
    DMRestoreLocalVector(da2,&vl2);
    DMDestroy(&da2);
  }
  if (fa->comm[2]) {
    DMRestoreLocalVector(da3,&vl3);
    DMDestroy(&da3);
  }
  *infa = fa;
  return(0);
}

/* Crude graphics to test that the ghost points are properly updated */

typedef struct {
  PetscInt    m[3],n[3];
  PetscScalar *xy[3];
} ZoomCtx;

PetscErrorCode DrawPatch(PetscDraw draw,void *ctx)
{
  ZoomCtx     *zctx = (ZoomCtx*)ctx;
  PetscInt    m,n,i,j,id,k;
  PetscReal   x1,x2,x3,x4,y_1,y2,y3,y4;
  PetscScalar *xy;

  for (k=0; k<3; k++) {
    m  = zctx->m[k];
    n  = zctx->n[k];
    xy = zctx->xy[k];

    /* for each cell, outline the quadrilateral with corners (i,j), (i+1,j), (i+1,j+1), (i,j+1) */
    for (j=0; j<n-1; j++) {
      for (i=0; i<m-1; i++) {
        id = i+j*m;     x1 = xy[2*id]; y_1 = xy[2*id+1];
        id = i+j*m+1;   x2 = xy[2*id]; y2  = xy[2*id+1];
        id = i+j*m+1+m; x3 = xy[2*id]; y3  = xy[2*id+1];
        id = i+j*m+m;   x4 = xy[2*id]; y4  = xy[2*id+1];
        PetscDrawLine(draw,x1,y_1,x2,y2,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x2,y2,x3,y3,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x3,y3,x4,y4,PETSC_DRAW_BLACK);
        PetscDrawLine(draw,x4,y4,x1,y_1,PETSC_DRAW_BLACK);
      }
    }
  }
  PetscDrawFlush(draw);
  return(0);
}

PetscErrorCode DrawFA(FA fa,Vec v)
{
  PetscScalar *va;
  ZoomCtx     zctx;
  PetscReal   xmin = 10000.0,xmax = -10000.0,ymin = 100000.0,ymax = -10000.0;
  PetscReal   xmint,xmaxt,ymint,ymaxt;
  PetscInt    i,vn,ln,j;

  VecGetArray(v,&va);
  VecGetSize(v,&vn);
  VecGetSize(fa->l,&ln);
  for (j=0; j<3; j++) {
    if (vn == ln) {   /* v is a local vector */
      zctx.xy[j] = va + 2*fa->offl[j];
      zctx.m[j]  = fa->ml[j];
      zctx.n[j]  = fa->nl[j];
    } else {          /* v is a global vector */
      zctx.xy[j] = va + 2*fa->offg[j];
      zctx.m[j]  = fa->mg[j];
      zctx.n[j]  = fa->ng[j];
    }
    for (i=0; i<zctx.m[j]*zctx.n[j]; i++) {
      if (zctx.xy[j][2*i] > xmax)   xmax = zctx.xy[j][2*i];
      if (zctx.xy[j][2*i] < xmin)   xmin = zctx.xy[j][2*i];
      if (zctx.xy[j][2*i+1] > ymax) ymax = zctx.xy[j][2*i+1];
      if (zctx.xy[j][2*i+1] < ymin) ymin = zctx.xy[j][2*i+1];
    }
  }
  MPI_Allreduce(&xmin,&xmint,1,MPIU_REAL,MPI_MIN,PETSC_COMM_WORLD);
  MPI_Allreduce(&xmax,&xmaxt,1,MPIU_REAL,MPI_MAX,PETSC_COMM_WORLD);
  MPI_Allreduce(&ymin,&ymint,1,MPIU_REAL,MPI_MIN,PETSC_COMM_WORLD);
  MPI_Allreduce(&ymax,&ymaxt,1,MPIU_REAL,MPI_MAX,PETSC_COMM_WORLD);
  xmin = xmint - .2*(xmaxt - xmint);
  xmax = xmaxt + .2*(xmaxt - xmint);
  ymin = ymint - .2*(ymaxt - ymint);
  ymax = ymaxt + .2*(ymaxt - ymint);
#if defined(PETSC_HAVE_X) || defined(PETSC_HAVE_OPENGL)
  {
    PetscDraw draw;

    PetscDrawCreate(PETSC_COMM_WORLD,0,"meshes",PETSC_DECIDE,PETSC_DECIDE,700,700,&draw);
    PetscDrawSetFromOptions(draw);
    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
    PetscDrawZoom(draw,DrawPatch,&zctx);
    PetscDrawDestroy(&draw);
  }
#endif
  VecRestoreArray(v,&va);
  return(0);
}

/* Crude mappings from rectangular arrays to the true geometry. These are ONLY for testing!
   They will not be used in the actual code. */
PetscErrorCode FAMapRegion3(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale = R/(fa->r2-1);
  Ascale = 2.0*PETSC_PI/(3.0*(fa->p1 - fa->p2 - 1));

  FAGetGlobalCorners(fa,2,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,2,&ga);
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      ga[k][i].X = (R + k*Rscale)*PetscCosScalar(1.*PETSC_PI/6. + i*Ascale);
      ga[k][i].Y = (R + k*Rscale)*PetscSinScalar(1.*PETSC_PI/6. + i*Ascale) - 4.*R;
    }
  }
  FARestoreGlobalArray(fa,g,2,&ga);
  return(0);
}

PetscErrorCode FAMapRegion2(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale = R/(fa->r2-1);
  Ascale = 2.0*PETSC_PI/fa->p2;

  FAGetGlobalCorners(fa,1,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,1,&ga);
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      ga[k][i].X = (R + k*Rscale)*PetscCosScalar(i*Ascale - PETSC_PI/2.0);
      ga[k][i].Y = (R + k*Rscale)*PetscSinScalar(i*Ascale - PETSC_PI/2.0);
    }
  }
  FARestoreGlobalArray(fa,g,1,&ga);
  return(0);
}

PetscErrorCode FAMapRegion1(FA fa,Vec g)
{
  PetscReal R = 1.0,Rscale,Ascale1,Ascale3;
  PetscInt  i,k,x,y,m,n;
  Field     **ga;

  Rscale  = R/(fa->r1-1);
  Ascale1 = 2.0*PETSC_PI/fa->p2;
  Ascale3 = 2.0*PETSC_PI/(3.0*(fa->p1 - fa->p2 - 1));

  FAGetGlobalCorners(fa,0,&x,&y,&m,&n);
  FAGetGlobalArray(fa,g,0,&ga);

  /* This mapping is WRONG! Not sure how to do it so I've done a poor job of
     it. You can see that the grid connections are correct. */
  for (k=y; k<y+n; k++) {
    for (i=x; i<x+m; i++) {
      if (i < (fa->p1-fa->p2)/2) {
        ga[k][i].X = (2.0*R + k*Rscale)*PetscCosScalar(i*Ascale3);
        ga[k][i].Y = (2.0*R + k*Rscale)*PetscSinScalar(i*Ascale3) - 4.*R;
      } else if (i > fa->p2 + (fa->p1 - fa->p2)/2) {
        ga[k][i].X = (2.0*R + k*Rscale)*PetscCosScalar(PETSC_PI+i*Ascale3);
        ga[k][i].Y = (2.0*R + k*Rscale)*PetscSinScalar(PETSC_PI+i*Ascale3) - 4.*R;
      } else {
        ga[k][i].X = (2.*R + k*Rscale)*PetscCosScalar((i-(fa->p1-fa->p2)/2)*Ascale1 - PETSC_PI/2.0);
        ga[k][i].Y = (2.*R + k*Rscale)*PetscSinScalar((i-(fa->p1-fa->p2)/2)*Ascale1 - PETSC_PI/2.0);
      }
    }
  }
  FARestoreGlobalArray(fa,g,0,&ga);
  return(0);
}

/* Simple test to check that the ghost points are properly updated */
PetscErrorCode FATest(FA fa)
{
  Vec         l,g;
  Field       **la;
  PetscInt    x,y,m,n,j,i,k,p;
  PetscMPIInt rank;

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  FAGetGlobalVector(fa,&g);
  FAGetLocalVector(fa,&l);

  /* fill up the global vector one region at a time with ITS logical coordinates, then update the LOCAL
     vector; print the local vectors to confirm they are correctly filled */
  for (j=0; j<3; j++) {
    VecSet(g,0.0);
    FAGetGlobalCorners(fa,j,&x,&y,&m,&n);
    PetscPrintf(PETSC_COMM_WORLD,"\nFilling global region %d, showing local results \n",j+1);
    FAGetGlobalArray(fa,g,j,&la);
    for (k=y; k<y+n; k++) {
      for (i=x; i<x+m; i++) {
        la[k][i].X = i;
        la[k][i].Y = k;
      }
    }
    FARestoreGlobalArray(fa,g,j,&la);

    FAGlobalToLocal(fa,g,l);
    DrawFA(fa,g);
    DrawFA(fa,l);

    for (p=0; p<3; p++) {
      FAGetLocalCorners(fa,p,&x,&y,&m,&n);
      FAGetLocalArray(fa,l,p,&la);
      PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\n[%d] Local array for region %d \n",rank,p+1);
      for (k=y+n-1; k>=y; k--) {    /* print in reverse order to match diagram in paper */
        for (i=x; i<x+m; i++) {
          PetscSynchronizedPrintf(PETSC_COMM_WORLD,"(%G,%G) ",la[k][i].X,la[k][i].Y);
        }
        PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\n");
      }
      FARestoreLocalArray(fa,l,p,&la);
      PetscSynchronizedFlush(PETSC_COMM_WORLD);
    }
  }
  VecDestroy(&g);
  VecDestroy(&l);
  return(0);
}

int main(int argc,char **argv)
{
  FA  fa;
  Vec g,l;

  PetscInitialize(&argc,&argv,0,help);
  FACreate(&fa);
  /* FATest(fa); */

  FAGetGlobalVector(fa,&g);
  FAGetLocalVector(fa,&l);

  FAMapRegion1(fa,g);
  FAMapRegion2(fa,g);
  FAMapRegion3(fa,g);

  FAGlobalToLocal(fa,g,l);
  DrawFA(fa,g);
  DrawFA(fa,l);

  VecDestroy(&g);
  VecDestroy(&l);
  FADestroy(&fa);

  PetscFinalize();
  return 0;
}