/* ex4.c — from petsc-3.13.6 (2020-09-29) */

static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
7: int main(int argc,char **argv)
8: {
9: PetscMPIInt rank;
10: PetscErrorCode ierr;
11: PetscInt M = 10,N = 8,m = PETSC_DECIDE;
12: PetscInt s =2,w=2,n = PETSC_DECIDE,nloc,l,i,j,kk;
13: PetscInt Xs,Xm,Ys,Ym,iloc,*iglobal;
14: const PetscInt *ltog;
15: PetscInt *lx = NULL,*ly = NULL;
16: PetscBool testorder = PETSC_FALSE,flg;
17: DMBoundaryType bx = DM_BOUNDARY_NONE,by= DM_BOUNDARY_NONE;
18: DM da;
19: PetscViewer viewer;
20: Vec local,global;
21: PetscScalar value;
22: DMDAStencilType st = DMDA_STENCIL_BOX;
23: AO ao;
25: PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
26: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer);
28: /* Readoptions */
29: PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);
30: PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);
31: PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
32: PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
33: PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);
34: PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);
36: flg = PETSC_FALSE;
37: PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL); if (flg) bx = DM_BOUNDARY_PERIODIC;
38: flg = PETSC_FALSE;
39: PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL); if (flg) by = DM_BOUNDARY_PERIODIC;
40: flg = PETSC_FALSE;
41: PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL); if (flg) bx = DM_BOUNDARY_GHOSTED;
42: flg = PETSC_FALSE;
43: PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL); if (flg) by = DM_BOUNDARY_GHOSTED;
44: flg = PETSC_FALSE;
45: PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL); if (flg) st = DMDA_STENCIL_STAR;
46: flg = PETSC_FALSE;
47: PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL); if (flg) st = DMDA_STENCIL_BOX;
48: flg = PETSC_FALSE;
49: PetscOptionsGetBool(NULL,NULL,"-testorder",&testorder,NULL);
50: /*
51: Test putting two nodes in x and y on each processor, exact last processor
52: in x and y gets the rest.
53: */
54: flg = PETSC_FALSE;
55: PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);
56: if (flg) {
57: if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -m option with -distribute option");
58: PetscMalloc1(m,&lx);
59: for (i=0; i<m-1; i++) { lx[i] = 4;}
60: lx[m-1] = M - 4*(m-1);
61: if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must set -n option with -distribute option");
62: PetscMalloc1(n,&ly);
63: for (i=0; i<n-1; i++) { ly[i] = 2;}
64: ly[n-1] = N - 2*(n-1);
65: }
68: /* Create distributed array and get vectors */
69: DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da);
70: DMSetFromOptions(da);
71: DMSetUp(da);
72: PetscFree(lx);
73: PetscFree(ly);
75: DMView(da,viewer);
76: DMCreateGlobalVector(da,&global);
77: DMCreateLocalVector(da,&local);
79: /* Set global vector; send ghost points to local vectors */
80: value = 1;
81: VecSet(global,value);
82: DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
83: DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
85: /* Scale local vectors according to processor rank; pass to global vector */
86: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
87: value = rank;
88: VecScale(local,value);
89: DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
90: DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
92: if (!testorder) { /* turn off printing when testing ordering mappings */
93: PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n");
94: VecView(global,PETSC_VIEWER_STDOUT_WORLD);
95: PetscPrintf(PETSC_COMM_WORLD,"\n\n");
96: }
98: /* Send ghost points to local vectors */
99: DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
100: DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);
102: flg = PETSC_FALSE;
103: PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);
104: if (flg) {
105: PetscViewer sviewer;
107: PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);
108: PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
109: PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
110: VecView(local,sviewer);
111: PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
112: PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD);
113: PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);
114: }
116: /* Tests mappings betweeen Section 1.5 Writing Application Codes with PETSc/PETSc orderings */
117: if (testorder) {
118: ISLocalToGlobalMapping ltogm;
120: DMGetLocalToGlobalMapping(da,<ogm);
121: ISLocalToGlobalMappingGetSize(ltogm,&nloc);
122: ISLocalToGlobalMappingGetIndices(ltogm,<og);
123: DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL);
124: DMDAGetAO(da,&ao);
125: PetscMalloc1(nloc,&iglobal);
127: /* Set iglobal to be global indices for each processor's local and ghost nodes,
128: using the DMDA ordering of grid points */
129: kk = 0;
130: for (j=Ys; j<Ys+Ym; j++) {
131: for (i=Xs; i<Xs+Xm; i++) {
132: iloc = w*((j-Ys)*Xm + i-Xs);
133: for (l=0; l<w; l++) {
134: iglobal[kk++] = ltog[iloc+l];
135: }
136: }
137: }
139: /* Map this to the Section 1.5 Writing Application Codes with PETSc ordering (which for DMDAs is just the natural ordering
140: that would be used for 1 processor, numbering most rapidly by x, then y) */
141: AOPetscToApplication(ao,nloc,iglobal);
143: /* Then map the Section 1.5 Writing Application Codes with PETSc ordering back to the PETSc DMDA ordering */
144: AOApplicationToPetsc(ao,nloc,iglobal);
146: /* Verify the mappings */
147: kk=0;
148: for (j=Ys; j<Ys+Ym; j++) {
149: for (i=Xs; i<Xs+Xm; i++) {
150: iloc = w*((j-Ys)*Xm + i-Xs);
151: for (l=0; l<w; l++) {
152: if (iglobal[kk] != ltog[iloc+l]) {
153: PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,j,i,l,ltog[iloc+l],iglobal[kk]);
154: }
155: kk++;
156: }
157: }
158: }
159: PetscFree(iglobal);
160: ISLocalToGlobalMappingRestoreIndices(ltogm,<og);
161: }
163: /* Free memory */
164: PetscViewerDestroy(&viewer);
165: VecDestroy(&local);
166: VecDestroy(&global);
167: DMDestroy(&da);
169: PetscFinalize();
170: return ierr;
171: }
/*TEST

   test:
      nsize: 4
      args: -nox
      filter: grep -v -i Object
      requires: x

   test:
      suffix: 2
      args: -testorder -nox
      requires: x

TEST*/