Actual source code: ex6.c
petsc-3.13.6 2020-09-29
static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
#include <petscao.h>

int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscInt         M = 3,N = 5,P = 3,s = 1,w = 2,nloc,l,i,j,k,kk,m = PETSC_DECIDE,n = PETSC_DECIDE,p = PETSC_DECIDE;
  PetscErrorCode   ierr;
  PetscInt         Xs,Xm,Ys,Ym,Zs,Zm,iloc,*iglobal;
  const PetscInt   *ltog;
  PetscInt         *lx = NULL,*ly = NULL,*lz = NULL;
  PetscBool        test_order = PETSC_FALSE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMBoundaryType   bx = DM_BOUNDARY_NONE,by = DM_BOUNDARY_NONE,bz = DM_BOUNDARY_NONE;
  DMDAStencilType  stencil_type = DMDA_STENCIL_BOX;
  AO               ao;
  PetscBool        flg = PETSC_FALSE;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
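  /* Create an X-window viewer; DMView() below draws the DMDA partitioning into it (graphics can be suppressed with -nox) */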
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,300,&viewer);

  /* Read options */
  PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);
  PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-NZ",&P,NULL);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-p",&p,NULL);
  PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);
  PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL);
  if (flg) stencil_type = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL);
  if (flg) stencil_type = DMDA_STENCIL_BOX;
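  /* Boundary-type options: -{x,y,z}periodic selects a periodic boundary and -{x,y,z}ghosted a ghosted (non-periodic) boundary in the corresponding direction; the default is DM_BOUNDARY_NONE */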
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL);
  if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL);
  if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xnonghosted",&flg,NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL);
  if (flg) by = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL);
  if (flg) by = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-ynonghosted",&flg,NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-zperiodic",&flg,NULL);
  if (flg) bz = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-zghosted",&flg,NULL);
  if (flg) bz = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-znonghosted",&flg,NULL);

  PetscOptionsGetBool(NULL,NULL,"-testorder",&test_order,NULL);
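  /* With -distribute, lx[], ly[], lz[] prescribe how many grid points each process owns in each direction; the entries must sum to M, N, and P respectively */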
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);
  if (flg) {
    if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
    PetscMalloc1(m,&lx);
    for (i=0; i<m-1; i++) lx[i] = 4;
    lx[m-1] = M - 4*(m-1);
    if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
    PetscMalloc1(n,&ly);
    for (i=0; i<n-1; i++) ly[i] = 2;
    ly[n-1] = N - 2*(n-1);
    if (p == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -p option with -distribute option");
    PetscMalloc1(p,&lz);
    for (i=0; i<p-1; i++) lz[i] = 2;
    lz[p-1] = P - 2*(p-1);
  }
  /* Create distributed array and get vectors */
  DMDACreate3d(PETSC_COMM_WORLD,bx,by,bz,stencil_type,M,N,P,m,n,p,w,s,lx,ly,lz,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  PetscFree(lx);
  PetscFree(ly);
  PetscFree(lz);
  DMView(da,viewer);
  DMCreateGlobalVector(da,&global);
  DMCreateLocalVector(da,&local);
  /* Set global vector; send ghost points to local vectors */
  value = 1;
  VecSet(global,value);
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  value = rank;
  VecScale(local,value);
  DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
  DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
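  /* With INSERT_VALUES only the owned portion of each local vector is copied back, so every global entry now equals the rank of the process that owns it */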
  if (!test_order) { /* turn off printing when testing ordering mappings */
    if (M*N*P<40) {
      PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");
      VecView(global,PETSC_VIEWER_STDOUT_WORLD);
      PetscPrintf(PETSC_COMM_WORLD,"\n");
    }
  }
  /* Send ghost points to local vectors */
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
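  /* After this scatter the ghost slots of each local vector hold values owned by neighbouring processes, i.e. their ranks */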
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);
  if (flg) {
    PetscViewer sviewer;
    PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
    PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
    VecView(local,sviewer);
    PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
    PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
    PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);
  }
  /* Tests mappings between application/PETSc orderings */
  if (test_order) {
    ISLocalToGlobalMapping ltogm;

    DMGetLocalToGlobalMapping(da,&ltogm);
    ISLocalToGlobalMappingGetSize(ltogm,&nloc);
    ISLocalToGlobalMappingGetIndices(ltogm,&ltog);

    DMDAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);
    DMDAGetAO(da,&ao);
    /* AOView(ao,PETSC_VIEWER_STDOUT_WORLD); */
    PetscMalloc1(nloc,&iglobal);

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
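    /* iloc is the offset of grid point (i,j,k)'s first dof in the ghosted local ordering:
       x varies fastest, then y, then z, with w degrees of freedom per grid point */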
    kk = 0;
    for (k=Zs; k<Zs+Zm; k++) {
      for (j=Ys; j<Ys+Ym; j++) {
        for (i=Xs; i<Xs+Xm; i++) {
          iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
          for (l=0; l<w; l++) {
            iglobal[kk++] = ltog[iloc+l];
          }
        }
      }
    }
    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
    AOPetscToApplication(ao,nloc,iglobal);

    /* Then map the application ordering back to the PETSc DMDA ordering */
    AOApplicationToPetsc(ao,nloc,iglobal);
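    /* The round trip PETSc -> application -> PETSc should reproduce the original indices,
       so the check below should print nothing */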
    /* Verify the mappings */
    kk = 0;
    for (k=Zs; k<Zs+Zm; k++) {
      for (j=Ys; j<Ys+Ym; j++) {
        for (i=Xs; i<Xs+Xm; i++) {
          iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
          for (l=0; l<w; l++) {
            if (iglobal[kk] != ltog[iloc+l]) {
              PetscPrintf(MPI_COMM_WORLD,"[%d] Problem with mapping: z=%D, j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,k,j,i,l,ltog[iloc+l],iglobal[kk]);
            }
            kk++;
          }
        }
      }
    }
    PetscFree(iglobal);
    ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);
  }
  /* Free memory */
  PetscViewerDestroy(&viewer);
  VecDestroy(&local);
  VecDestroy(&global);
  DMDestroy(&da);
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   test:
      args: -testorder -nox

TEST*/