MOAB: Mesh Oriented datABase  (version 5.5.0)
uber_parallel_test.cpp
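/** \file uber_parallel_test.cpp
 *  Parallel read regression test: loads a mesh file with several combinations of
 *  ReadParallel options (READ_DELETE vs. BCAST_DELETE, partition-set selection,
 *  shared-entity resolution, ghost exchange) and verifies on all processors that
 *  the resulting shared-entity handles are consistent.
 */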
#include "moab/ParallelComm.hpp"
#include "ReadParallel.hpp"
#include "moab/FileOptions.hpp"
#include "MBTagConventions.hpp"
#include "moab/Core.hpp"
#include "moab_mpi.h"
#include "TestUtil.hpp"

#include <iostream>
#include <algorithm>
#include <sstream>
#include <cassert>
#if !defined( _MSC_VER ) && !defined( __MINGW32__ )
#include <unistd.h>
#endif

using namespace moab;

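// CHKERR: on failure, print the MOAB error code with file and line,
// then return it from the enclosing function.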
#define CHKERR( a )                                                                                  \
    do                                                                                               \
    {                                                                                                \
        ErrorCode val = ( a );                                                                       \
        if( MB_SUCCESS != val )                                                                      \
        {                                                                                            \
            std::cerr << "Error code " << val << " at " << __FILE__ << ":" << __LINE__ << std::endl; \
            return val;                                                                              \
        }                                                                                            \
    } while( false )

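// PCHECK: collective assertion; if the condition fails on any processor,
// every processor reports the failure and returns MB_FAILURE.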
#define PCHECK( A ) \
    if( is_any_proc_error( !( A ) ) ) return report_error( __FILE__, __LINE__ )

ErrorCode report_error( const char* file, int line )
{
    std::cerr << "Failure at " << file << ':' << line << std::endl;
    return MB_FAILURE;
}

ErrorCode test_read( const char* filename, const char* option );

#define RUN_TEST_ARG3( A, B, C ) run_test( &( A ), #A, B, C )

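// Reduce a per-processor error flag with MPI_MAX so every processor
// learns whether any processor failed.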
int is_any_proc_error( int is_my_error )
{
    int result = 0;
    int err    = MPI_Allreduce( &is_my_error, &result, 1, MPI_INT, MPI_MAX, MPI_COMM_WORLD );
    return err || result;
}

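// Run a test function on every processor, reduce the result across ranks,
// and have rank 0 print pass/fail.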
int run_test( ErrorCode ( *func )( const char*, const char* ),
              const char* func_name,
              const std::string& file_name,
              const char* option )
{
    ErrorCode result = ( *func )( file_name.c_str(), option );
    int is_err       = is_any_proc_error( ( MB_SUCCESS != result ) );
    int rank;
    MPI_Comm_rank( MPI_COMM_WORLD, &rank );
    if( rank == 0 )
    {
        if( is_err )
            std::cout << func_name << " : FAILED!!" << std::endl;
        else
            std::cout << func_name << " : success" << std::endl;
    }

    return is_err;
}

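// Typical invocation (binary name and process count are illustrative);
// an alternate .h5m file may be passed as argv[1]:
//   mpiexec -np 4 ./uber_parallel_test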
int main( int argc, char* argv[] )
{
    int rank, size;
    MPI_Init( &argc, &argv );
    MPI_Comm_rank( MPI_COMM_WORLD, &rank );
    MPI_Comm_size( MPI_COMM_WORLD, &size );
    int num_errors = 0;

    const char* option;
    std::string vtk_test_filename = TestDir + "unittest/hex_2048.vtk";

#ifdef MOAB_HAVE_HDF5
    std::string filename;
    if( 1 < argc )
        filename = std::string( argv[1] );
    else
        filename = TestDir + "unittest/64bricks_512hex.h5m";

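    // Option keywords exercised below (see MOAB's ReadParallel documentation):
    //   PARALLEL=READ_DELETE   - every proc reads the whole file, then deletes the
    //                            entities outside its partition
    //   PARALLEL=BCAST_DELETE  - the root proc reads and broadcasts the file; each
    //                            proc then deletes the entities outside its partition
    //   PARTITION=...          - which set tag (GEOM_DIMENSION or MATERIAL_SET)
    //                            identifies the partition sets
    //   PARTITION_VAL=3        - keep only sets whose tag value is 3 (here: volumes)
    //   PARTITION_DISTRIBUTE   - spread the partition sets evenly across procs
    //   PARALLEL_RESOLVE_SHARED_ENTS - resolve entities shared between procs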
    //=========== read_delete, geom_dimension, resolve_shared
    option = "PARALLEL=READ_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;"
             "PARALLEL_RESOLVE_SHARED_ENTS;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );

    //=========== read_delete, material_set, resolve_shared
    option = "PARALLEL=READ_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_"
             "SHARED_ENTS;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );

    //=========== bcast_delete, geom_dimension, resolve_shared
    option = "PARALLEL=BCAST_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;"
             "PARALLEL_RESOLVE_SHARED_ENTS;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );

    //=========== bcast_delete, material_set, resolve_shared
    option = "PARALLEL=BCAST_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_"
             "SHARED_ENTS;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );

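    // PARALLEL_GHOSTS=3.0.1 requests one layer of ghosted 3-dimensional elements,
    // using vertices (dimension 0) as the bridge entities between procs.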
    //=========== read_delete, geom_dimension, resolve_shared, exch ghost
    option = "PARALLEL=READ_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;"
             "PARALLEL_RESOLVE_SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );

    //=========== read_delete, material_set, resolve_shared, exch ghost
    option = "PARALLEL=READ_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_"
             "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );

    //=========== bcast_delete, geom_dimension, resolve_shared, exch ghost
    option = "PARALLEL=BCAST_DELETE;PARTITION=GEOM_DIMENSION;PARTITION_VAL=3;PARTITION_DISTRIBUTE;"
             "PARALLEL_RESOLVE_SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );

    //=========== bcast_delete, material_set, resolve_shared, exch ghost
    option = "PARALLEL=BCAST_DELETE;PARTITION=MATERIAL_SET;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_"
             "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;";
    num_errors += RUN_TEST_ARG3( test_read, filename, option );
#endif
    if( vtk_test_filename.size() )
    {
        //=========== bcast_delete, trivial, resolve_shared
        option = "PARALLEL=BCAST_DELETE;PARTITION=TRIVIAL;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_"
                 "SHARED_ENTS;";
        num_errors += RUN_TEST_ARG3( test_read, vtk_test_filename, option );
        //=========== bcast_delete, trivial, resolve_shared + ghosting
        option = "PARALLEL=BCAST_DELETE;PARTITION=TRIVIAL;PARTITION_DISTRIBUTE;PARALLEL_RESOLVE_"
                 "SHARED_ENTS;PARALLEL_GHOSTS=3.0.1;";
        num_errors += RUN_TEST_ARG3( test_read, vtk_test_filename, option );
    }
    MPI_Finalize();

    return num_errors;
}

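// Load the file with the given ReadParallel options, then verify that the shared-entity
// handles stored by ParallelComm are consistent across all processors.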
ErrorCode test_read( const char* filename, const char* option )
{
    Core moab;  // MOAB instance that receives the parallel read
    ErrorCode rval;

    rval = moab.load_file( filename, 0, option );CHKERR( rval );

    ParallelComm* pcomm = ParallelComm::get_pcomm( &moab, 0 );

    rval = pcomm->check_all_shared_handles();CHKERR( rval );

    return MB_SUCCESS;
}