// NOTE(review): this chunk is a line-sampled extract; the leading integers
// fused onto lines (e.g. "13", "16") are line-number residue from the
// extraction, not part of the real source. Code is left byte-identical.
//
// Path of the GCRM NetCDF input used by every test below; TestDir is
// presumably the test-data root defined by the harness — confirm.
13 std::string
example = TestDir +
"unittest/io/gcrm_r3.nc";
// The RCBZOLTAN partition-method tests are compiled only when MOAB was
// built with both MPI and the Zoltan partitioner.
16 #if defined( MOAB_HAVE_MPI ) && defined( MOAB_HAVE_ZOLTAN )
21 #if defined( MOAB_HAVE_MPI ) && defined( MOAB_HAVE_ZOLTAN )
// Absolute tolerance used when comparing read-back floating-point tag
// values against expected reference values.
37 const double eps = 1e-6;
// Entry point (fragment — intervening original lines are not visible here).
// Initializes MPI before any parallel reads; the #if blocks presumably gate
// the Zoltan-based variants of the tests — confirm against the full file.
40 int main(
int argc,
char* argv[] )
42 MPI_Init( &argc, &argv );
46 #if defined( MOAB_HAVE_MPI ) && defined( MOAB_HAVE_ZOLTAN )
51 #if defined( MOAB_HAVE_MPI ) && defined( MOAB_HAVE_ZOLTAN )
// Fragment of the single-variable read test: load only the "vorticity"
// variable (NO_EDGES skips edge creation), once with the TRIVIAL partition
// and once with RCBZOLTAN (DEBUG_IO=1 adds reader diagnostics).
105 read_options =
"PARALLEL=READ_PART;PARTITION_METHOD=TRIVIAL;NO_EDGES;VARIABLE=vorticity";
107 read_options =
"PARALLEL=READ_PART;PARTITION_METHOD=RCBZOLTAN;NO_EDGES;VARIABLE=vorticity;DEBUG_IO=1";
// Collect the global IDs of the locally owned cells; gids is sized to the
// local cell count (tag_get_data presumably fills it — not visible here).
124 std::vector< int > gids( local_cells.
size() );
// Copy the gids into a Range in reverse order; range_inserter sorts and
// uniquifies, so the reversal only affects insertion order, not contents.
126 Range local_cell_gids;
127 std::copy( gids.rbegin(), gids.rend(),
range_inserter( local_cell_gids ) );
// Buffers for two timesteps of vorticity at 3 sampled cells x `layers`
// levels. NOTE(review): if `layers` is not a compile-time constant this is
// a VLA (non-standard C++) — confirm `layers` is const in the full file.
145 double vorticity0_val[3 *
layers];
146 double vorticity1_val[3 *
layers];
// Sampled cell handles differ per scenario (first/middle/last of the local
// range); the indices presumably match each partition's expected local
// cell count — confirm against the reference data.
161 EntityHandle cell_ents[] = { local_cells[0], local_cells[159], local_cells[318] };
195 EntityHandle cell_ents[] = { local_cells[0], local_cells[161], local_cells[322] };
227 EntityHandle cell_ents[] = { local_cells[0], local_cells[160], local_cells[320] };
// Fragment of the mesh-read test: VARIABLE= (empty) reads mesh only, no
// variables; shared entities are resolved across ranks. TRIVIAL and
// RCBZOLTAN variants mirror the single-variable test above.
293 read_options =
"PARALLEL=READ_PART;PARTITION_METHOD=TRIVIAL;PARALLEL_RESOLVE_SHARED_ENTS;VARIABLE=";
295 read_options =
"PARALLEL=READ_PART;PARTITION_METHOD=RCBZOLTAN;PARALLEL_RESOLVE_SHARED_ENTS;VARIABLE=";
// Per-rank entity counts taken after each read; the second assignment of
// each pair presumably re-counts under the other partition method.
309 int verts_num = local_verts.
size();
330 verts_num = local_verts.
size();
353 int edges_num = local_edges.
size();
374 edges_num = local_edges.
size();
399 int cells_num = local_cells.
size();
415 cells_num = local_cells.
size();
// Report per-rank counts and reduce them to rank 0 for a global total.
// Each block follows the same pattern: print local count, MPI_Reduce(SUM)
// into total_*_num over the ParallelComm communicator, print the total
// (the prints of totals are presumably guarded to rank 0 by code not
// visible in this extract — confirm).
429 std::cout <<
"proc: " <<
rank <<
" verts:" << verts_num <<
"\n";
432 MPI_Reduce( &verts_num, &total_verts_num, 1, MPI_INT, MPI_SUM, 0, pcomm->
proc_config().
proc_comm() );
435 std::cout <<
"total vertices: " << total_verts_num <<
"\n";
// Same pattern for edges.
439 std::cout <<
"proc: " <<
rank <<
" edges:" << edges_num <<
"\n";
442 MPI_Reduce( &edges_num, &total_edges_num, 1, MPI_INT, MPI_SUM, 0, pcomm->
proc_config().
proc_comm() );
445 std::cout <<
"total edges: " << total_edges_num <<
"\n";
// Same pattern for cells.
449 std::cout <<
"proc: " <<
rank <<
" cells:" << cells_num <<
"\n";
452 MPI_Reduce( &cells_num, &total_cells_num, 1, MPI_INT, MPI_SUM, 0, pcomm->
proc_config().
proc_comm() );
455 std::cout <<
"total cells: " << total_cells_num <<
"\n";
// Write the in-memory mesh back out in parallel (one part per rank); only
// possible when MOAB was built with parallel HDF5. The output name encodes
// the partition method: test_gcrm.h5m or test_gcrm_rcbzoltan.h5m.
459 #ifdef MOAB_HAVE_HDF5_PARALLEL
460 std::string write_options(
"PARALLEL=WRITE_PART;" );
462 std::string output_file =
"test_gcrm";
463 if( rcbzoltan ) output_file +=
"_rcbzoltan";
464 output_file +=
".h5m";
// NOTE(review): the ErrorCode returned by write_file is presumably checked
// by a CHECK macro on the original line — not visible in this extract.
466 mb.
write_file( output_file.c_str(), NULL, write_options.c_str() );
// Fragment of the gather-set test setup: read with TRIVIAL partitioning,
// then append ";GATHER_SET=<rank>" so the reader builds a gather set (a
// serial copy of the whole mesh) on that one rank.
478 read_options =
"PARALLEL=READ_PART;PARTITION_METHOD=TRIVIAL;PARALLEL_RESOLVE_SHARED_ENTS";
479 std::ostringstream gather_set_option;
480 gather_set_option <<
";GATHER_SET=" << gather_set_rank;
// Bail out early when the requested gather rank is not a valid rank in
// this communicator (procs is presumably the communicator size — confirm).
490 if( gather_set_rank < 0 || gather_set_rank >= procs )
return;
// Fragment of the gather test body: owned cells are gathered onto
// gather_set_rank and the gathered vorticity values are then spot-checked
// on that rank only.
492 Range cells, cells_owned;
// Only the gathering rank inspects/validates the gather set.
499 if( gather_set_rank ==
rank )
// The reader must have created a gather set on this rank.
505 assert( gather_set != 0 );
508 Tag vorticity_tag0, gid_tag;
// Gather tag data (vorticity, keyed by global id) from the owned cells of
// all ranks onto the gather set owned by gather_set_rank.
513 pcomm->
gather_data( cells_owned, vorticity_tag0, gid_tag, gather_set, gather_set_rank );
515 if( gather_set_rank ==
rank )
518 Range gather_set_cells;
// Spot-check 4 cells spanning the gathered range (first, around the
// middle, last); indices presumably match the file's total cell count —
// confirm against the reference data.
525 EntityHandle cell_ents[] = { gather_set_cells[0], gather_set_cells[320], gather_set_cells[321],
526 gather_set_cells[641] };
// 4 sampled cells x `layers` levels of vorticity. NOTE(review): a VLA if
// `layers` is not a compile-time constant — confirm.
527 double vorticity0_val[4 *
layers];
// Fragment of the re-read test: first pass reads mesh structure only
// (VARIABLE= empty), second pass uses NOMESH to attach variable data to
// the already-loaded mesh (here "vorticity" at TIMESTEP=0). Line 560's
// option string is presumably completed by a concatenated literal on a
// following original line not visible in this extract.
555 read_options =
"PARALLEL=READ_PART;PARTITION;NOMESH;VARIABLE=;PARTITION_METHOD=TRIVIAL";
560 read_options =
"PARALLEL=READ_PART;PARTITION;PARALLEL_RESOLVE_SHARED_ENTS;PARTITION_METHOD="
566 read_options =
"PARALLEL=READ_PART;PARTITION;PARTITION_METHOD=TRIVIAL;NOMESH;VARIABLE="
"vorticity;TIMESTEP=0";
// Spot-check 3 cells (first / middle / last of the local range) across all
// `layers` levels, mirroring the earlier single-variable test.
596 double vorticity0_val[3 *
layers];
597 EntityHandle cell_ents[] = { local_cells[0], local_cells[160], local_cells[320] };