/* Actual source code: ex6.c — from petsc-3.7.3 (2016-08-01) */
1: static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";
3: #include <petscdm.h>
4: #include <petscdmda.h>
5: #include <petscao.h>
9: int main(int argc,char **argv)
10: {
11: PetscMPIInt rank;
12: PetscInt M = 3,N = 5,P=3,s=1,w=2,nloc,l,i,j,k,kk,m = PETSC_DECIDE,n = PETSC_DECIDE,p = PETSC_DECIDE;
13: PetscErrorCode ierr;
14: PetscInt Xs,Xm,Ys,Ym,Zs,Zm,iloc,*iglobal;
15: const PetscInt *ltog;
16: PetscInt *lx = NULL,*ly = NULL,*lz = NULL;
17: PetscBool test_order = PETSC_FALSE;
18: DM da;
19: PetscViewer viewer;
20: Vec local,global;
21: PetscScalar value;
22: DMBoundaryType bx = DM_BOUNDARY_NONE,by = DM_BOUNDARY_NONE,bz = DM_BOUNDARY_NONE;
23: DMDAStencilType stencil_type = DMDA_STENCIL_BOX;
24: AO ao;
25: PetscBool flg = PETSC_FALSE;
27: PetscInitialize(&argc,&argv,(char*)0,help);
28: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,300,&viewer);
30: /* Read options */
31: PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);
32: PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);
33: PetscOptionsGetInt(NULL,NULL,"-NZ",&P,NULL);
34: PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
35: PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
36: PetscOptionsGetInt(NULL,NULL,"-p",&p,NULL);
37: PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);
38: PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);
39: flg = PETSC_FALSE;
40: PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL);
41: if (flg) stencil_type = DMDA_STENCIL_STAR;
42: flg = PETSC_FALSE;
43: PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL);
44: if (flg) stencil_type = DMDA_STENCIL_BOX;
46: flg = PETSC_FALSE;
47: PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL);
48: if (flg) bx = DM_BOUNDARY_PERIODIC;
49: flg = PETSC_FALSE;
50: PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL);
51: if (flg) bx = DM_BOUNDARY_GHOSTED;
52: flg = PETSC_FALSE;
53: PetscOptionsGetBool(NULL,NULL,"-xnonghosted",&flg,NULL);
55: flg = PETSC_FALSE;
56: PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL);
57: if (flg) by = DM_BOUNDARY_PERIODIC;
58: flg = PETSC_FALSE;
59: PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL);
60: if (flg) by = DM_BOUNDARY_GHOSTED;
61: flg = PETSC_FALSE;
62: PetscOptionsGetBool(NULL,NULL,"-ynonghosted",&flg,NULL);
64: flg = PETSC_FALSE;
65: PetscOptionsGetBool(NULL,NULL,"-zperiodic",&flg,NULL);
66: if (flg) bz = DM_BOUNDARY_PERIODIC;
67: flg = PETSC_FALSE;
68: PetscOptionsGetBool(NULL,NULL,"-zghosted",&flg,NULL);
69: if (flg) bz = DM_BOUNDARY_GHOSTED;
70: flg = PETSC_FALSE;
71: PetscOptionsGetBool(NULL,NULL,"-znonghosted",&flg,NULL);
73: PetscOptionsGetBool(NULL,NULL,"-testorder",&test_order,NULL);
75: flg = PETSC_FALSE;
76: PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);
77: if (flg) {
78: if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -m option with -distribute option");
79: PetscMalloc1(m,&lx);
80: for (i=0; i<m-1; i++) lx[i] = 4;
81: lx[m-1] = M - 4*(m-1);
82: if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -n option with -distribute option");
83: PetscMalloc1(n,&ly);
84: for (i=0; i<n-1; i++) ly[i] = 2;
85: ly[n-1] = N - 2*(n-1);
86: if (p == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -p option with -distribute option");
87: PetscMalloc1(p,&lz);
88: for (i=0; i<p-1; i++) lz[i] = 2;
89: lz[p-1] = P - 2*(p-1);
90: }
92: /* Create distributed array and get vectors */
93: DMDACreate3d(PETSC_COMM_WORLD,bx,by,bz,stencil_type,M,N,P,m,n,p,w,s,lx,ly,lz,&da);
94: PetscFree(lx);
95: PetscFree(ly);
96: PetscFree(lz);
97: DMView(da,viewer);
98: DMCreateGlobalVector(da,&global);
99: DMCreateLocalVector(da,&local);
101: /* Set global vector; send ghost points to local vectors */
102: value = 1;
103: VecSet(global,value);
104: DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
105: DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
107: /* Scale local vectors according to processor rank; pass to global vector */
108: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
109: value = rank;
110: VecScale(local,value);
111: DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
112: DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
114: if (!test_order) { /* turn off printing when testing ordering mappings */
115: if (M*N*P<40) {
116: PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");
117: VecView(global,PETSC_VIEWER_STDOUT_WORLD);
118: PetscPrintf(PETSC_COMM_WORLD,"\n");
119: }
120: }
122: /* Send ghost points to local vectors */
123: DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
124: DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
126: flg = PETSC_FALSE;
127: PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);
128: if (flg) {
129: PetscViewer sviewer;
130: PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);
131: PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
132: PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
133: VecView(local,sviewer);
134: PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
135: PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
136: PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);
137: }
139: /* Tests mappings betweeen application/PETSc orderings */
140: if (test_order) {
141: ISLocalToGlobalMapping ltogm;
143: DMGetLocalToGlobalMapping(da,<ogm);
144: ISLocalToGlobalMappingGetSize(ltogm,&nloc);
145: ISLocalToGlobalMappingGetIndices(ltogm,<og);
147: DMDAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);
148: DMDAGetAO(da,&ao);
149: /* AOView(ao,PETSC_VIEWER_STDOUT_WORLD); */
150: PetscMalloc1(nloc,&iglobal);
152: /* Set iglobal to be global indices for each processor's local and ghost nodes,
153: using the DMDA ordering of grid points */
154: kk = 0;
155: for (k=Zs; k<Zs+Zm; k++) {
156: for (j=Ys; j<Ys+Ym; j++) {
157: for (i=Xs; i<Xs+Xm; i++) {
158: iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
159: for (l=0; l<w; l++) {
160: iglobal[kk++] = ltog[iloc+l];
161: }
162: }
163: }
164: }
166: /* Map this to the application ordering (which for DMDAs is just the natural ordering
167: that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
168: AOPetscToApplication(ao,nloc,iglobal);
170: /* Then map the application ordering back to the PETSc DMDA ordering */
171: AOApplicationToPetsc(ao,nloc,iglobal);
173: /* Verify the mappings */
174: kk=0;
175: for (k=Zs; k<Zs+Zm; k++) {
176: for (j=Ys; j<Ys+Ym; j++) {
177: for (i=Xs; i<Xs+Xm; i++) {
178: iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
179: for (l=0; l<w; l++) {
180: if (iglobal[kk] != ltog[iloc+l]) {
181: PetscPrintf(MPI_COMM_WORLD,"[%D] Problem with mapping: z=%D, j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",
182: rank,k,j,i,l,ltog[iloc+l],iglobal[kk]);
183: }
184: kk++;
185: }
186: }
187: }
188: }
189: PetscFree(iglobal);
190: ISLocalToGlobalMappingRestoreIndices(ltogm,<og);
191: }
193: /* Free memory */
194: PetscViewerDestroy(&viewer);
195: VecDestroy(&local);
196: VecDestroy(&global);
197: DMDestroy(&da);
198: PetscFinalize();
199: return 0;
200: }