/*
   Actual source code: ex6.c
   petsc-3.6.4  2016-04-12
*/
1: static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";
3: #include <petscdm.h>
4: #include <petscdmda.h>
5: #include <petscao.h>
9: int main(int argc,char **argv)
10: {
11: PetscMPIInt rank;
12: PetscInt M = 3,N = 5,P=3,s=1,w=2,nloc,l,i,j,k,kk,m = PETSC_DECIDE,n = PETSC_DECIDE,p = PETSC_DECIDE;
13: PetscErrorCode ierr;
14: PetscInt Xs,Xm,Ys,Ym,Zs,Zm,iloc,*iglobal;
15: const PetscInt *ltog;
16: PetscInt *lx = NULL,*ly = NULL,*lz = NULL;
17: PetscBool test_order = PETSC_FALSE;
18: DM da;
19: PetscViewer viewer;
20: Vec local,global;
21: PetscScalar value;
22: DMBoundaryType bx = DM_BOUNDARY_NONE,by = DM_BOUNDARY_NONE,bz = DM_BOUNDARY_NONE;
23: DMDAStencilType stencil_type = DMDA_STENCIL_BOX;
24: AO ao;
25: PetscBool flg = PETSC_FALSE;
27: PetscInitialize(&argc,&argv,(char*)0,help);
28: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,300,&viewer);
30: /* Read options */
31: PetscOptionsGetInt(NULL,"-NX",&M,NULL);
32: PetscOptionsGetInt(NULL,"-NY",&N,NULL);
33: PetscOptionsGetInt(NULL,"-NZ",&P,NULL);
34: PetscOptionsGetInt(NULL,"-m",&m,NULL);
35: PetscOptionsGetInt(NULL,"-n",&n,NULL);
36: PetscOptionsGetInt(NULL,"-p",&p,NULL);
37: PetscOptionsGetInt(NULL,"-s",&s,NULL);
38: PetscOptionsGetInt(NULL,"-w",&w,NULL);
39: flg = PETSC_FALSE;
40: PetscOptionsGetBool(NULL,"-star",&flg,NULL);
41: if (flg) stencil_type = DMDA_STENCIL_STAR;
42: flg = PETSC_FALSE;
43: PetscOptionsGetBool(NULL,"-box",&flg,NULL);
44: if (flg) stencil_type = DMDA_STENCIL_BOX;
46: flg = PETSC_FALSE;
47: PetscOptionsGetBool(NULL,"-xperiodic",&flg,NULL);
48: if (flg) bx = DM_BOUNDARY_PERIODIC;
49: flg = PETSC_FALSE;
50: PetscOptionsGetBool(NULL,"-xghosted",&flg,NULL);
51: if (flg) bx = DM_BOUNDARY_GHOSTED;
52: flg = PETSC_FALSE;
53: PetscOptionsGetBool(NULL,"-xnonghosted",&flg,NULL);
55: flg = PETSC_FALSE;
56: PetscOptionsGetBool(NULL,"-yperiodic",&flg,NULL);
57: if (flg) by = DM_BOUNDARY_PERIODIC;
58: flg = PETSC_FALSE;
59: PetscOptionsGetBool(NULL,"-yghosted",&flg,NULL);
60: if (flg) by = DM_BOUNDARY_GHOSTED;
61: flg = PETSC_FALSE;
62: PetscOptionsGetBool(NULL,"-ynonghosted",&flg,NULL);
64: flg = PETSC_FALSE;
65: PetscOptionsGetBool(NULL,"-zperiodic",&flg,NULL);
66: if (flg) bz = DM_BOUNDARY_PERIODIC;
67: flg = PETSC_FALSE;
68: PetscOptionsGetBool(NULL,"-zghosted",&flg,NULL);
69: if (flg) bz = DM_BOUNDARY_GHOSTED;
70: flg = PETSC_FALSE;
71: PetscOptionsGetBool(NULL,"-znonghosted",&flg,NULL);
73: PetscOptionsGetBool(NULL,"-testorder",&test_order,NULL);
75: flg = PETSC_FALSE;
76: PetscOptionsGetBool(NULL,"-distribute",&flg,NULL);
77: if (flg) {
78: if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -m option with -distribute option");
79: PetscMalloc1(m,&lx);
80: for (i=0; i<m-1; i++) lx[i] = 4;
81: lx[m-1] = M - 4*(m-1);
82: if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -n option with -distribute option");
83: PetscMalloc1(n,&ly);
84: for (i=0; i<n-1; i++) ly[i] = 2;
85: ly[n-1] = N - 2*(n-1);
86: if (p == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -p option with -distribute option");
87: PetscMalloc1(p,&lz);
88: for (i=0; i<p-1; i++) lz[i] = 2;
89: lz[p-1] = P - 2*(p-1);
90: }
92: /* Create distributed array and get vectors */
93: DMDACreate3d(PETSC_COMM_WORLD,bx,by,bz,stencil_type,M,N,P,m,n,p,w,s,lx,ly,lz,&da);
94: PetscFree(lx);
95: PetscFree(ly);
96: PetscFree(lz);
97: DMView(da,viewer);
98: DMCreateGlobalVector(da,&global);
99: DMCreateLocalVector(da,&local);
101: /* Set global vector; send ghost points to local vectors */
102: value = 1;
103: VecSet(global,value);
104: DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
105: DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
107: /* Scale local vectors according to processor rank; pass to global vector */
108: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
109: value = rank;
110: VecScale(local,value);
111: DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
112: DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
114: if (!test_order) { /* turn off printing when testing ordering mappings */
115: if (M*N*P<40) {
116: PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");
117: VecView(global,PETSC_VIEWER_STDOUT_WORLD);
118: PetscPrintf(PETSC_COMM_WORLD,"\n");
119: }
120: }
122: /* Send ghost points to local vectors */
123: DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
124: DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
126: flg = PETSC_FALSE;
127: PetscOptionsGetBool(NULL,"-local_print",&flg,NULL);
128: if (flg) {
129: PetscViewer sviewer;
130: PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);
131: PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
132: PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
133: VecView(local,sviewer);
134: PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
135: PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
136: }
138: /* Tests mappings betweeen application/PETSc orderings */
139: if (test_order) {
140: ISLocalToGlobalMapping ltogm;
142: DMGetLocalToGlobalMapping(da,<ogm);
143: ISLocalToGlobalMappingGetSize(ltogm,&nloc);
144: ISLocalToGlobalMappingGetIndices(ltogm,<og);
146: DMDAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);
147: DMDAGetAO(da,&ao);
148: /* AOView(ao,PETSC_VIEWER_STDOUT_WORLD); */
149: PetscMalloc1(nloc,&iglobal);
151: /* Set iglobal to be global indices for each processor's local and ghost nodes,
152: using the DMDA ordering of grid points */
153: kk = 0;
154: for (k=Zs; k<Zs+Zm; k++) {
155: for (j=Ys; j<Ys+Ym; j++) {
156: for (i=Xs; i<Xs+Xm; i++) {
157: iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
158: for (l=0; l<w; l++) {
159: iglobal[kk++] = ltog[iloc+l];
160: }
161: }
162: }
163: }
165: /* Map this to the application ordering (which for DMDAs is just the natural ordering
166: that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
167: AOPetscToApplication(ao,nloc,iglobal);
169: /* Then map the application ordering back to the PETSc DMDA ordering */
170: AOApplicationToPetsc(ao,nloc,iglobal);
172: /* Verify the mappings */
173: kk=0;
174: for (k=Zs; k<Zs+Zm; k++) {
175: for (j=Ys; j<Ys+Ym; j++) {
176: for (i=Xs; i<Xs+Xm; i++) {
177: iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
178: for (l=0; l<w; l++) {
179: if (iglobal[kk] != ltog[iloc+l]) {
180: PetscPrintf(MPI_COMM_WORLD,"[%D] Problem with mapping: z=%D, j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",
181: rank,k,j,i,l,ltog[iloc+l],iglobal[kk]);
182: }
183: kk++;
184: }
185: }
186: }
187: }
188: PetscFree(iglobal);
189: ISLocalToGlobalMappingRestoreIndices(ltogm,<og);
190: }
192: /* Free memory */
193: PetscViewerDestroy(&viewer);
194: VecDestroy(&local);
195: VecDestroy(&global);
196: DMDestroy(&da);
197: PetscFinalize();
198: return 0;
199: }