Actual source code: ex14.c
static char help[] = "Tests DMCreateDomainDecomposition.\n\n";

/*
   Use the options
      -da_grid_x <nx>       - number of grid points in x direction, if M < 0
      -da_grid_y <ny>       - number of grid points in y direction, if N < 0
      -da_processors_x <MX> - number of processors in x direction
      -da_processors_y <MY> - number of processors in y direction
*/
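
/*
   Example run (a sketch; the executable name and process count below are illustrative,
   not part of this source):
      mpiexec -n 4 ./ex14 -dim 2
*/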

#include <petscdm.h>
#include <petscdmda.h>
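
/*
   FillLocalSubdomain - fills the given vector of a DMDA so that every owned grid point
   stores its global grid indices: (i, j, k) in the 3-D case and (i, j, rank) in the
   2-D case. This makes the results of the scatters exercised below easy to check
   by inspection.
*/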

PetscErrorCode FillLocalSubdomain(DM da, Vec gvec)
{
  DMDALocalInfo info;
  PetscMPIInt   rank;
  PetscInt      i, j, k;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(DMDAGetLocalInfo(da, &info));

  if (info.dim == 3) {
    PetscScalar ***g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the owned grid points */
    for (k = info.zs; k < info.zs + info.zm; k++) {
      for (j = info.ys; j < info.ys + info.ym; j++) {
        for (i = info.xs; i < info.xs + info.xm; i++) {
          g[k][j][info.dof * i + 0] = i;
          g[k][j][info.dof * i + 1] = j;
          g[k][j][info.dof * i + 2] = k;
        }
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  if (info.dim == 2) {
    PetscScalar **g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the owned grid points */
    for (j = info.ys; j < info.ys + info.ym; j++) {
      for (i = info.xs; i < info.xs + info.xm; i++) {
        g[j][info.dof * i + 0] = i;
        g[j][info.dof * i + 1] = j;
        g[j][info.dof * i + 2] = rank;
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM            da, *subda;
  PetscInt      i, dim = 3;
  PetscInt      M = 25, N = 25, P = 25;
  PetscMPIInt   size, rank;
  Vec           v;
  Vec           slvec, sgvec;
  IS           *ois, *iis;
  VecScatter    oscata;
  VecScatter   *iscat, *oscat, *gscat;
  DMDALocalInfo info;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dim", &dim, NULL));

  /* Create distributed array and get vectors */
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (dim == 2) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, &da));
  } else if (dim == 3) {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, P, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, NULL, &da));
  }
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDAGetLocalInfo(da, &info));

  PetscCall(DMCreateDomainDecomposition(da, NULL, NULL, &iis, &ois, &subda));
  PetscCall(DMCreateDomainDecompositionScatters(da, 1, subda, &iscat, &oscat, &gscat));
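
  /*
     iis and ois hold the index sets of the nonoverlapping (inner) and overlapping (outer)
     pieces of the decomposition, and subda the corresponding subdomain DMs (one per rank here).
     The scatters move data between the global vector of da and the subdomain vectors:
     iscat for the interior part, oscat for the overlapping part, and gscat for the
     subdomain's local (ghosted) vector; see DMCreateDomainDecompositionScatters().
  */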

  {
    DMDALocalInfo subinfo;
    MatStencil    lower, upper;
    IS            patchis;
    Vec           smallvec;
    Vec           largevec;
    VecScatter    patchscat;

    PetscCall(DMDAGetLocalInfo(subda[0], &subinfo));

    lower.i = info.xs;
    lower.j = info.ys;
    lower.k = info.zs;
    upper.i = info.xs + info.xm;
    upper.j = info.ys + info.ym;
    upper.k = info.zs + info.zm;
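
    /*
       lower/upper bracket the grid box owned by this rank in global (i, j, k) indices,
       with upper exclusive; DMDACreatePatchIS() below turns that box into an index set
       of global vector entries.
    */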

    /* test the patch IS as a thing to scatter to/from */
    PetscCall(DMDACreatePatchIS(da, &lower, &upper, &patchis, patchis_offproc));
    PetscCall(DMGetGlobalVector(da, &largevec));

    PetscCall(VecCreate(PETSC_COMM_SELF, &smallvec));
    PetscCall(VecSetSizes(smallvec, info.dof * (upper.i - lower.i) * (upper.j - lower.j) * (upper.k - lower.k), PETSC_DECIDE));
    PetscCall(VecSetFromOptions(smallvec));
    PetscCall(VecScatterCreate(smallvec, NULL, largevec, patchis, &patchscat));
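
    /*
       With a NULL index set for smallvec, VecScatterCreate() takes all of its entries in
       order and sends them to the locations of largevec selected by patchis, so each rank
       deposits its patch into the proper place in the global vector.
    */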

    PetscCall(FillLocalSubdomain(subda[0], smallvec));
    PetscCall(VecSet(largevec, 0));

    PetscCall(VecScatterBegin(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(ISView(patchis, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(VecScatterView(patchscat, PETSC_VIEWER_STDOUT_WORLD));
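
    /* view each rank's small vector in turn (the barriers serialize the output), then the assembled global vector */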

    for (i = 0; i < size; i++) {
      if (i == rank) PetscCall(VecView(smallvec, PETSC_VIEWER_STDOUT_SELF));
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    PetscCall(VecView(largevec, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(VecDestroy(&smallvec));
    PetscCall(DMRestoreGlobalVector(da, &largevec));
    PetscCall(ISDestroy(&patchis));
    PetscCall(VecScatterDestroy(&patchscat));
  }

  /* view the various parts */
  {
    for (i = 0; i < size; i++) {
      if (i == rank) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %" PetscInt_FMT ": \n", i));
        PetscCall(DMView(subda[0], PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCall(DMGetLocalVector(subda[0], &slvec));
    PetscCall(DMGetGlobalVector(subda[0], &sgvec));
    PetscCall(DMGetGlobalVector(da, &v));

    /* test filling the overlapping (outer) region between the big DM and the subdomain with the IS-based scatter */
    PetscCall(VecScatterCreate(v, ois[0], sgvec, NULL, &oscata));
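
    /*
       oscata relates the entries of v listed in ois[0] (the overlapping subdomain) to the
       subdomain global vector sgvec; it is used in reverse below to add subdomain values
       back into v.
    */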

    PetscCall(FillLocalSubdomain(subda[0], sgvec));

    PetscCall(VecScatterBegin(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    /* test the scatter between the global vector and the subdomain's local (ghosted) vector */

    /* fill up the local subdomain and then add the contributions together */
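
    /*
       gscat[0] connects the global vector of da with the local (ghosted) vector of
       subda[0]: the forward scatter fills slvec from v, and the reverse scatter that
       follows accumulates those values back into v.
    */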
    PetscCall(FillLocalSubdomain(da, v));

    PetscCall(VecScatterBegin(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test ghost scattering backwards */

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test overlap scattering backwards */
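
    /*
       The ghosted values in slvec are first summed into the subdomain global vector sgvec
       with DMLocalToGlobal(), and then oscat[0] is applied in reverse to add them into v.
    */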

    PetscCall(DMLocalToGlobalBegin(subda[0], slvec, ADD_VALUES, sgvec));
    PetscCall(DMLocalToGlobalEnd(subda[0], slvec, ADD_VALUES, sgvec));

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test interior scattering backwards */

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test matrix allocation */
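
    /*
       DMCreateMatrix() returns a matrix of the requested type sized and preallocated for
       the subdomain DMDA; each rank creates and views its own matrix in turn.
    */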
    for (i = 0; i < size; i++) {
      if (i == rank) {
        Mat m;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %" PetscInt_FMT ": \n", i));
        PetscCall(DMSetMatType(subda[0], MATAIJ));
        PetscCall(DMCreateMatrix(subda[0], &m));
        PetscCall(MatView(m, PETSC_VIEWER_STDOUT_SELF));
        PetscCall(MatDestroy(&m));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }
    PetscCall(DMRestoreLocalVector(subda[0], &slvec));
    PetscCall(DMRestoreGlobalVector(subda[0], &sgvec));
    PetscCall(DMRestoreGlobalVector(da, &v));
  }

  PetscCall(DMDestroy(&subda[0]));
  PetscCall(ISDestroy(&ois[0]));
  PetscCall(ISDestroy(&iis[0]));

  PetscCall(VecScatterDestroy(&iscat[0]));
  PetscCall(VecScatterDestroy(&oscat[0]));
  PetscCall(VecScatterDestroy(&gscat[0]));
  PetscCall(VecScatterDestroy(&oscata));
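
  /*
     The arrays returned by DMCreateDomainDecomposition() and
     DMCreateDomainDecompositionScatters() are allocated by PETSc but owned by the caller,
     so they are freed here after their entries have been destroyed.
  */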

  PetscCall(PetscFree(iscat));
  PetscCall(PetscFree(oscat));
  PetscCall(PetscFree(gscat));

  PetscCall(PetscFree(subda));
  PetscCall(PetscFree(ois));
  PetscCall(PetscFree(iis));

  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}