Actual source code: ex12.c

static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n";

#include <petscdmplex.h>
#include <petscsf.h>

/* Sample usage:

Load a file in serial and distribute it on 24 processes:

  make -f ./gmakefile test search="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners:

  make -f ./gmakefile test search="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24

Load a file in serial, distribute it randomly, refine it in parallel, redistribute it on 24 processes using two different partitioners, and view the result in VTK format:

  make -f ./gmakefile test search="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24

*/
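
/* Outside the test harness, the same runs can be launched directly; a minimal sketch, assuming the example
   has been built in the current directory as ./ex12 and that PETSC_DIR points at a PETSc tree:

     mpiexec -n 24 ./ex12 -filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view
*/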

enum {
  STAGE_LOAD,
  STAGE_DISTRIBUTE,
  STAGE_REFINE,
  STAGE_REDISTRIBUTE
};
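/* The values above index the stages[] array in AppCtx below; the corresponding PETSc log stages are
   registered in ProcessOptions() */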

typedef struct {
  /* Domain and mesh definition */
  PetscInt      overlap;          /* The cell overlap to use during partitioning */
  PetscBool     testSection;      /* Use a PetscSection to specify cell weights */
  PetscBool     testPartition;    /* Use a fixed partitioning for testing */
  PetscBool     testRedundant;    /* Use a redundant partitioning for testing */
  PetscBool     loadBalance;      /* Load balance via a second distribute step */
  PetscBool     partitionBalance; /* Balance shared point partition */
  PetscLogStage stages[4];
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBegin;
  options->overlap          = 0;
  options->testPartition    = PETSC_FALSE;
  options->testSection      = PETSC_FALSE;
  options->testRedundant    = PETSC_FALSE;
  options->loadBalance      = PETSC_FALSE;
  options->partitionBalance = PETSC_FALSE;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL, 0));
  PetscCall(PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL));
  PetscCall(PetscOptionsBool("-test_section", "Use a PetscSection for cell weights", "ex12.c", options->testSection, &options->testSection, NULL));
  PetscCall(PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL));
  PetscCall(PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL));
  PetscCall(PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL));
  PetscOptionsEnd();

  PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]));
  PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
  PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]));
  PetscCall(PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  DM          pdm             = NULL;
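  /* Hand-coded cell partitions used with -test_partition: the tri* arrays assign the 8 cells of the small
     triangular test mesh to 2, 3, 4, or 8 ranks, and the quad* arrays assign 4 quadrilateral cells to 2 ranks */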
  PetscInt    triSizes_n2[2]  = {4, 4};
  PetscInt    triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7};
  PetscInt    triSizes_n3[3]  = {3, 2, 3};
  PetscInt    triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4};
  PetscInt    triSizes_n4[4]  = {2, 2, 2, 2};
  PetscInt    triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6};
  PetscInt    triSizes_n8[8]  = {1, 1, 1, 1, 1, 1, 1, 1};
  PetscInt    triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7};
  PetscInt    quadSizes[2]    = {2, 2};
  PetscInt    quadPoints[4]   = {2, 3, 0, 1};
  PetscInt    overlap         = user->overlap >= 0 ? user->overlap : 0;
  PetscInt    dim;
  PetscBool   simplex;
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMGetDimension(*dm, &dim));
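  /* Optionally attach a PetscSection with one dof per cell; such a section can be used to supply cell
     weights to the partitioner (see -test_section) */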
  if (user->testSection) {
    PetscInt     numComp[] = {1};
    PetscInt     numDof[]  = {0, 0, 0, 1};
    PetscSection section;

    PetscCall(DMSetNumFields(*dm, 1));
    PetscCall(DMPlexCreateSection(*dm, NULL, numComp, numDof + 3 - dim, 0, NULL, NULL, NULL, NULL, &section));
    PetscCall(DMSetLocalSection(*dm, section));
    PetscCall(PetscSectionViewFromOptions(section, NULL, "-cell_section_view"));
    PetscCall(PetscSectionDestroy(&section));
  }
  PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view"));
  PetscCall(PetscLogStagePop());
  PetscCall(DMPlexIsSimplex(*dm, &simplex));
  PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
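  /* Distribute the mesh with the requested cell overlap. The partitioner is configured from the options
     database, unless -test_partition installs a shell partitioner with the fixed partitions above. */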
  if (!user->testRedundant) {
    PetscPartitioner part;

    PetscCall(DMPlexGetPartitioner(*dm, &part));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(PetscPartitionerViewFromOptions(part, NULL, "-view_partitioner_pre"));
    PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
    if (user->testPartition) {
      const PetscInt *sizes  = NULL;
      const PetscInt *points = NULL;
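      /* The partition arrays are only filled on rank 0, which holds the whole mesh before distribution */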

      if (rank == 0) {
        if (dim == 2 && simplex && size == 2) {
          sizes  = triSizes_n2;
          points = triPoints_n2;
        } else if (dim == 2 && simplex && size == 3) {
          sizes  = triSizes_n3;
          points = triPoints_n3;
        } else if (dim == 2 && simplex && size == 4) {
          sizes  = triSizes_n4;
          points = triPoints_n4;
        } else if (dim == 2 && simplex && size == 8) {
          sizes  = triSizes_n8;
          points = triPoints_n8;
        } else if (dim == 2 && !simplex && size == 2) {
          sizes  = quadSizes;
          points = quadPoints;
        }
      }
      PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
      PetscCall(PetscPartitionerShellSetPartition(part, size, sizes, points));
    }
    PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
    PetscCall(PetscPartitionerViewFromOptions(part, NULL, "-view_partitioner"));
  } else {
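    /* -test_redundant: DMPlexGetRedundantDM() gives every process a complete copy of the mesh; the returned
       star forest (SF) is also used to exercise DMPlexMigrate() on a separate DM */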
    PetscSF sf;

    PetscCall(DMPlexGetRedundantDM(*dm, &sf, &pdm));
    if (sf) {
      DM test;

      PetscCall(DMPlexCreate(comm, &test));
      PetscCall(PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh"));
      PetscCall(DMPlexMigrate(*dm, sf, test));
      PetscCall(DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view"));
      PetscCall(DMDestroy(&test));
    }
    PetscCall(PetscSFDestroy(&sf));
  }
  if (pdm) {
    PetscCall(DMDestroy(dm));
    *dm = pdm;
  }
  PetscCall(PetscLogStagePop());
  PetscCall(DMSetFromOptions(*dm));
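  /* -load_balance: redistribute the already-distributed mesh in a second step. The DM and its partitioner
     are given the "lb_" options prefix so this step can be configured separately, e.g. -lb_petscpartitioner_type parmetis */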
  if (user->loadBalance) {
    PetscPartitioner part;

    PetscCall(DMViewFromOptions(*dm, NULL, "-prelb_dm_view"));
    PetscCall(DMPlexSetOptionsPrefix(*dm, "lb_"));
    PetscCall(PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE]));
    PetscCall(DMPlexGetPartitioner(*dm, &part));
    PetscCall(PetscPartitionerViewFromOptions(part, NULL, "-view_partitioner_pre"));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part, "lb_"));
    PetscCall(PetscPartitionerSetFromOptions(part));
    if (user->testPartition) {
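      /* Hard-coded repartition for the two-rank test case; rank 1 overrides the point ordering below */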
      PetscInt reSizes_n2[2]  = {2, 2};
      PetscInt rePoints_n2[4] = {2, 3, 0, 1};
      if (rank) {
        rePoints_n2[0] = 1;
        rePoints_n2[1] = 2;
        rePoints_n2[2] = 0;
        rePoints_n2[3] = 3;
      }

      PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL));
      PetscCall(PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2));
    }
    PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance));
    PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm));
    PetscCall(PetscPartitionerViewFromOptions(part, NULL, "-view_partitioner"));
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm = pdm;
    }
    PetscCall(PetscLogStagePop());
  }
  PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscCall(PetscLogStagePop());
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM     dm;
  AppCtx user; /* user-defined work context */

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  # Parallel, no overlap tests 0-2
  test:
    suffix: 0
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -petscpartitioner_type {{simple multistage}}
    output_file: output/empty.out
  test:
    suffix: 1
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 1_ms_default
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -petscpartitioner_type multistage -petscpartitioner_multistage_levels_petscpartitioner_type simple
    output_file: output/empty.out
  test:
    suffix: 1_ms_cellsection
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_section -dm_view ascii::ascii_info_detail -petscpartitioner_type multistage -petscpartitioner_multistage_levels_petscpartitioner_type simple -petscpartitioner_multistage_node_size 2 -view_partitioner ::ascii_info_detail
  test:
    suffix: 2
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 2_ms
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -dm_view ascii::ascii_info_detail \
          -petscpartitioner_type multistage \
          -petscpartitioner_multistage_strategy node -petscpartitioner_multistage_node_size {{1 2 3 4}separate output} -petscpartitioner_multistage_node_interleaved {{0 1}separate output} \
          -petscpartitioner_multistage_levels_petscpartitioner_type simple -petscpartitioner_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 3
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  # Parallel, level-2 overlap test 5
  test:
    suffix: 5
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
  test:
    suffix: 5_ms
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -overlap 2 -dm_view ascii::ascii_info_detail \
          -petscpartitioner_type multistage \
          -petscpartitioner_multistage_strategy msection -petscpartitioner_multistage_msection {{2 3}separate output} \
          -petscpartitioner_multistage_levels_petscpartitioner_type simple -petscpartitioner_view ascii::ascii_info_detail
  # Parallel load balancing, tests 6-7
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
  # Parallel redundant copying, test 8
  test:
    suffix: 8
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
  test:
    suffix: lb_0
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance
  test:
    suffix: lb_0_ms
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type multistage -load_balance -lb_petscpartitioner_view ::ascii_info_detail -prelb_dm_view ::load_balance -dm_view ::load_balance -lb_petscpartitioner_multistage_levels_petscpartitioner_type parmetis -lb_petscpartitioner_multistage_node_size 2

  # Same tests as above, but with balancing of the shared point partition
  test:
    suffix: 9
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -partition_balance -petscpartitioner_type {{simple multistage}}
    output_file: output/empty.out
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11_ms
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -dm_view ascii::ascii_info_detail -partition_balance \
          -petscpartitioner_type multistage \
          -petscpartitioner_multistage_strategy node -petscpartitioner_multistage_node_size {{1 2 3 4}separate output} -petscpartitioner_multistage_node_interleaved {{0 1}separate output} \
          -petscpartitioner_multistage_levels_petscpartitioner_type simple -petscpartitioner_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 12-13
  test:
    suffix: 12
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 13
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 14
  test:
    suffix: 14
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, tests 15-16
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 16
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 17
  test:
    suffix: 17
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: lb_1
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance
TEST*/