Mesh Oriented datABase (MOAB), version 5.5.0 — an array-based unstructured mesh library.
Source file: NCHelperESMF.cpp (see the generated documentation for full details).
1 /*
2  * NCHelperESMF.cpp
3  *
4  * Created on: Sep. 12, 2023
5  */
6 
7 #include "NCHelperESMF.hpp"
8 #include "moab/ReadUtilIface.hpp"
9 #include "moab/FileOptions.hpp"
10 #include "MBTagConventions.hpp"
11 
12 #ifdef MOAB_HAVE_ZOLTAN
14 #endif
15 
16 #include <cmath>
17 
18 namespace moab
19 {
20 
22 const double pideg = acos( -1.0 ) / 180.0;
23 
24 NCHelperESMF::NCHelperESMF( ReadNC* readNC, int fileId, const FileOptions& opts, EntityHandle fileSet )
25  : UcdNCHelper( readNC, fileId, opts, fileSet ), maxEdgesPerCell( DEFAULT_MAX_EDGES_PER_CELL ), coordDim( 0 ),
26  centerCoordsId( -1 ), degrees( true )
27 {
28 }
29 
31 {
32  std::vector< std::string >& dimNames = readNC->dimNames;
33  if( ( std::find( dimNames.begin(), dimNames.end(), std::string( "nodeCount" ) ) != dimNames.end() ) &&
34  ( std::find( dimNames.begin(), dimNames.end(), std::string( "elementCount" ) ) != dimNames.end() ) &&
35  ( std::find( dimNames.begin(), dimNames.end(), std::string( "maxNodePElement" ) ) != dimNames.end() ) &&
36  ( std::find( dimNames.begin(), dimNames.end(), std::string( "coordDim" ) ) != dimNames.end() ) )
37  {
38  return true;
39  }
40 
41  return false;
42 }
43 
45 {
46  std::vector< std::string >& dimNames = _readNC->dimNames;
47  std::vector< int >& dimLens = _readNC->dimLens;
48  std::map< std::string, ReadNC::VarData >& varInfo = _readNC->varInfo;
49 
50  ErrorCode rval;
51  unsigned int idx;
52  std::vector< std::string >::iterator vit;
53 
54  // Get max edges per cell reported in the ESMF file header
55  if( ( vit = std::find( dimNames.begin(), dimNames.end(), "maxNodePElement" ) ) != dimNames.end() )
56  {
57  idx = vit - dimNames.begin();
58  maxEdgesPerCell = dimLens[idx];
60  {
62  "maxEdgesPerCell read from the ESMF file header has exceeded " << DEFAULT_MAX_EDGES_PER_CELL );
63  }
64  }
65 
66  // Get number of cells
67  if( ( vit = std::find( dimNames.begin(), dimNames.end(), "elementCount" ) ) != dimNames.end() )
68  idx = vit - dimNames.begin();
69  else
70  {
71  MB_SET_ERR( MB_FAILURE, "Couldn't find 'elementCount' dimension" );
72  }
73  cDim = idx;
74  nCells = dimLens[idx];
75 
76  // Get number of vertices per cell (max)
77  if( ( vit = std::find( dimNames.begin(), dimNames.end(), "maxNodePElement" ) ) != dimNames.end() )
78  idx = vit - dimNames.begin();
79  else
80  {
81  MB_SET_ERR( MB_FAILURE, "Couldn't find 'maxNodePElement' dimension" );
82  }
83 
84  maxEdgesPerCell = dimLens[idx];
85 
86  // Get number of vertices
87  vDim = -1;
88  if( ( vit = std::find( dimNames.begin(), dimNames.end(), "nodeCount" ) ) != dimNames.end() )
89  idx = vit - dimNames.begin();
90  else
91  {
92  MB_SET_ERR( MB_FAILURE, "Couldn't find 'nodeCount' dimension" );
93  }
94  vDim = idx;
95  nVertices = dimLens[idx];
96 
97  // Get number of coord dimensions
98  if( ( vit = std::find( dimNames.begin(), dimNames.end(), "coordDim" ) ) != dimNames.end() )
99  idx = vit - dimNames.begin();
100  else
101  {
102  MB_SET_ERR( MB_FAILURE, "Couldn't find 'coordDim' dimension" );
103  }
104 
105  coordDim = dimLens[idx];
106 
107  int success = NCFUNC( inq_varid )( _fileId, "centerCoords", &centerCoordsId );
108  if( success ) centerCoordsId = -1; // no center coords variable
109 
110  // decide now the units, by looking at nodeCoords; they should always exist
111  int nodeCoordsId;
112  success = NCFUNC( inq_varid )( _fileId, "nodeCoords", &nodeCoordsId );
113  if( success ) MB_CHK_SET_ERR( MB_FAILURE, "Trouble getting nodeCoords" );
114 
115  auto vmit = varInfo.find( "nodeCoords" );
116  if( varInfo.end() == vmit )
117  MB_SET_ERR( MB_FAILURE, "Couldn't find variable "
118  << "nodeCoords" );
119  ReadNC::VarData& glData = vmit->second;
120  auto attIt = glData.varAtts.find( "units" );
121  if( attIt != glData.varAtts.end() )
122  {
123  unsigned int sz = attIt->second.attLen;
124  std::string att_data;
125  att_data.resize( sz + 1 );
126  att_data[sz] = '\000';
127  success =
128  NCFUNC( get_att_text )( _fileId, attIt->second.attVarId, attIt->second.attName.c_str(), &att_data[0] );
129  if( 0 == success && att_data.find( "radians" ) != std::string::npos ) degrees = false;
130  }
131 
132  // Hack: create dummy variables for dimensions (like nCells) with no corresponding coordinate
133  // variables
134  rval = create_dummy_variables();MB_CHK_SET_ERR( rval, "Failed to create dummy variables" );
135 
136  return MB_SUCCESS;
137 }
138 
140 {
141  bool& noMixedElements = _readNC->noMixedElements;
142  DebugOutput& dbgOut = _readNC->dbgOut;
143 
144 #ifdef MOAB_HAVE_MPI
145  int rank = 0;
146  int procs = 1;
147  bool& isParallel = _readNC->isParallel;
148  ParallelComm* myPcomm = NULL;
149  if( isParallel )
150  {
151  myPcomm = _readNC->myPcomm;
152  rank = myPcomm->proc_config().proc_rank();
153  procs = myPcomm->proc_config().proc_size();
154  }
155 
156  if( procs >= 2 )
157  {
158  // Shift rank to obtain a rotated trivial partition
159  int shifted_rank = rank;
160  int& trivialPartitionShift = _readNC->trivialPartitionShift;
161  if( trivialPartitionShift > 0 ) shifted_rank = ( rank + trivialPartitionShift ) % procs;
162 
163  // Compute the number of local cells on this proc
164  nLocalCells = int( std::floor( 1.0 * nCells / procs ) );
165 
166  // The starting global cell index in the ESMF file for this proc
167  int start_cell_idx = shifted_rank * nLocalCells;
168 
169  // Number of extra cells after equal split over procs
170  int iextra = nCells % procs;
171 
172  // Allocate extra cells over procs
173  if( shifted_rank < iextra ) nLocalCells++;
174  start_cell_idx += std::min( shifted_rank, iextra );
175 
176  start_cell_idx++; // 0 based -> 1 based
177 
178  // Redistribute local cells after trivial partition (e.g. apply Zoltan partition)
179  ErrorCode rval = redistribute_local_cells( start_cell_idx, myPcomm );MB_CHK_SET_ERR( rval, "Failed to redistribute local cells after trivial partition" );
180  }
181  else
182  {
185  }
186 #else
189 #endif
190  dbgOut.tprintf( 1, " localGidCells.psize() = %d\n", (int)localGidCells.psize() );
191  dbgOut.tprintf( 1, " localGidCells.size() = %d\n", (int)localGidCells.size() );
192 
193  // Read number of edges on each local cell, to calculate actual maxEdgesPerCell
194  int nEdgesOnCellVarId;
195  int success = NCFUNC( inq_varid )( _fileId, "numElementConn", &nEdgesOnCellVarId );
196  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to get variable id of numElementConn" );
197  std::vector< int > num_edges_on_local_cells( nLocalCells );
198 
199 #ifdef MOAB_HAVE_PNETCDF
200  size_t nb_reads = localGidCells.psize();
201  std::vector< int > requests( nb_reads );
202  std::vector< int > statuss( nb_reads );
203  size_t idxReq = 0;
204 #endif
205  size_t indexInArray = 0;
206  for( Range::pair_iterator pair_iter = localGidCells.pair_begin(); pair_iter != localGidCells.pair_end();
207  ++pair_iter )
208  {
209  EntityHandle starth = pair_iter->first;
210  EntityHandle endh = pair_iter->second;
211  NCDF_SIZE read_start = (NCDF_SIZE)( starth - 1 );
212  NCDF_SIZE read_count = (NCDF_SIZE)( endh - starth + 1 );
213 
214  // Do a partial read in each subrange
215 #ifdef MOAB_HAVE_PNETCDF
216  success = NCFUNCREQG( _vara_int )( _fileId, nEdgesOnCellVarId, &read_start, &read_count,
217  &( num_edges_on_local_cells[indexInArray] ), &requests[idxReq++] );
218 #else
219  success = NCFUNCAG( _vara_int )( _fileId, nEdgesOnCellVarId, &read_start, &read_count,
220  &( num_edges_on_local_cells[indexInArray] ) );
221 #endif
222  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to read nEdgesOnCell data in a loop" );
223 
224  // Increment the index for next subrange
225  indexInArray += ( endh - starth + 1 );
226  }
227 
228 #ifdef MOAB_HAVE_PNETCDF
229  // Wait outside the loop
230  success = NCFUNC( wait_all )( _fileId, requests.size(), &requests[0], &statuss[0] );
231  if( success ) MB_SET_ERR( MB_FAILURE, "Failed on wait_all" );
232 #endif
233 
234  // Read vertices on each local cell, to get localGidVerts and cell connectivity later
235  int verticesOnCellVarId;
236  success = NCFUNC( inq_varid )( _fileId, "elementConn", &verticesOnCellVarId );
237  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to get variable id of elementConn" );
238  std::vector< int > vertices_on_local_cells( nLocalCells * maxEdgesPerCell );
239 #ifdef MOAB_HAVE_PNETCDF
240  idxReq = 0;
241 #endif
242  indexInArray = 0;
243  for( Range::pair_iterator pair_iter = localGidCells.pair_begin(); pair_iter != localGidCells.pair_end();
244  ++pair_iter )
245  {
246  EntityHandle starth = pair_iter->first;
247  EntityHandle endh = pair_iter->second;
248  NCDF_SIZE read_starts[2] = { static_cast< NCDF_SIZE >( starth - 1 ), 0 };
249  NCDF_SIZE read_counts[2] = { static_cast< NCDF_SIZE >( endh - starth + 1 ),
250  static_cast< NCDF_SIZE >( maxEdgesPerCell ) };
251 
252  // Do a partial read in each subrange
253 #ifdef MOAB_HAVE_PNETCDF
254  success = NCFUNCREQG( _vara_int )( _fileId, verticesOnCellVarId, read_starts, read_counts,
255  &( vertices_on_local_cells[indexInArray] ), &requests[idxReq++] );
256 #else
257  success = NCFUNCAG( _vara_int )( _fileId, verticesOnCellVarId, read_starts, read_counts,
258  &( vertices_on_local_cells[indexInArray] ) );
259 #endif
260  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to read verticesOnCell data in a loop" );
261 
262  // Increment the index for next subrange
263  indexInArray += ( endh - starth + 1 ) * maxEdgesPerCell;
264  }
265 
266 #ifdef MOAB_HAVE_PNETCDF
267  // Wait outside the loop
268  success = NCFUNC( wait_all )( _fileId, requests.size(), &requests[0], &statuss[0] );
269  if( success ) MB_SET_ERR( MB_FAILURE, "Failed on wait_all" );
270 #endif
271 
272  // Correct local cell vertices array, replace the padded vertices with the last vertices
273  // in the corresponding cells; sometimes the padded vertices are 0, sometimes a large
274  // vertex id. Make sure they are consistent to our padded option
275  for( int local_cell_idx = 0; local_cell_idx < nLocalCells; local_cell_idx++ )
276  {
277  int num_edges = num_edges_on_local_cells[local_cell_idx];
278  int idx_in_local_vert_arr = local_cell_idx * maxEdgesPerCell;
279  int last_vert_idx = vertices_on_local_cells[idx_in_local_vert_arr + num_edges - 1];
280  for( int i = num_edges; i < maxEdgesPerCell; i++ )
281  vertices_on_local_cells[idx_in_local_vert_arr + i] = last_vert_idx;
282  }
283 
284  // Create local vertices
285  EntityHandle start_vertex;
286  ErrorCode rval = create_local_vertices( vertices_on_local_cells, start_vertex );MB_CHK_SET_ERR( rval, "Failed to create local vertices for MPAS mesh" );
287 
288  // Create local cells, either unpadded or padded
289  if( noMixedElements )
290  {
291  rval = create_padded_local_cells( vertices_on_local_cells, start_vertex, faces );MB_CHK_SET_ERR( rval, "Failed to create padded local cells for MPAS mesh" );
292  }
293  else
294  {
295  rval = create_local_cells( vertices_on_local_cells, num_edges_on_local_cells, start_vertex, faces );MB_CHK_SET_ERR( rval, "Failed to create local cells for MPAS mesh" );
296  }
297 
298  return MB_SUCCESS;
299 }
300 
301 #ifdef MOAB_HAVE_MPI
302 ErrorCode NCHelperESMF::redistribute_local_cells( int start_cell_idx, ParallelComm* pco )
303 {
304 
305  // If possible, apply Zoltan partition
306  // will start from trivial partition in cell space
307  // will read cell connectivities, coordinates of vertices in conn, and then compute centers of the cells
308  //
309 #ifdef MOAB_HAVE_ZOLTAN
311  {
312  // if we have centerCoords, use them; if not, compute them for Zoltan to work
313  std::vector< double > xverts, yverts, zverts;
314  xverts.resize( nLocalCells );
315  yverts.resize( nLocalCells );
316  zverts.resize( nLocalCells );
317 
318  if( centerCoordsId >= 0 )
319  {
320  // read from file
321  std::vector< double > coords( nLocalCells * coordDim );
322 
323  NCDF_SIZE read_starts[2] = { static_cast< NCDF_SIZE >( start_cell_idx - 1 ), 0 };
324  NCDF_SIZE read_counts[2] = { static_cast< NCDF_SIZE >( nLocalCells ),
325  static_cast< NCDF_SIZE >( coordDim ) };
326  int success = NCFUNCAG( _vara_double )( _fileId, centerCoordsId, read_starts, read_counts, &( coords[0] ) );
327  if( success ) MB_SET_ERR( MB_FAILURE, "Failed on reading center coordinates" );
328  if( 2 == coordDim )
329  {
330  double factor = 1.;
331  if( degrees ) factor = pideg;
332  for( int i = 0; i < nLocalCells; i++ )
333  {
334  double lon = coords[i * 2] * factor;
335  double lat = coords[i * 2 + 1] * factor;
336  double cosphi = cos( lat );
337  zverts[i] = sin( lat );
338  xverts[i] = cosphi * cos( lon );
339  yverts[i] = cosphi * sin( lon );
340  }
341  }
342  }
343  else
344  {
345  // Read connectivities
346  // Read vertices on each local cell, to get localGidVerts and cell connectivity later
347  int verticesOnCellVarId;
348  int success = NCFUNC( inq_varid )( _fileId, "elementConn", &verticesOnCellVarId );
349  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to get variable id of elementConn" );
350  std::vector< int > vertices_on_local_cells( nLocalCells * maxEdgesPerCell );
351 
352  NCDF_SIZE read_starts[2] = { static_cast< NCDF_SIZE >( start_cell_idx - 1 ), 0 };
353  NCDF_SIZE read_counts[2] = { static_cast< NCDF_SIZE >( nLocalCells ),
354  static_cast< NCDF_SIZE >( maxEdgesPerCell ) };
355 
356  success = NCFUNCAG( _vara_int )( _fileId, verticesOnCellVarId, read_starts, read_counts,
357  &( vertices_on_local_cells[0] ) );
358  if( success ) MB_SET_ERR( MB_FAILURE, "Failed on reading elementConn variable" );
359  std::vector< int > num_edges_on_local_cells( nLocalCells );
360 
361  NCDF_SIZE read_start = start_cell_idx - 1;
362  NCDF_SIZE read_count = (NCDF_SIZE)( nLocalCells );
363 
364  int nEdgesOnCellVarId;
365  success = NCFUNC( inq_varid )( _fileId, "numElementConn", &nEdgesOnCellVarId );
366  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to get variable id of numElementConn" );
367  success = NCFUNCAG( _vara_int )( _fileId, nEdgesOnCellVarId, &read_start, &read_count,
368  &( num_edges_on_local_cells[0] ) );
369  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to get numElementConn" );
370  // Make a copy of vertices_on_local_cells for sorting (keep original one to set cell
371  // connectivity later)
372 
373  // Correct local cell vertices array, replace the padded vertices with the last vertices
374  // in the corresponding cells; sometimes the padded vertices are 0, sometimes a large
375  // vertex id. Make sure they are consistent to our padded option
376  for( int local_cell_idx = 0; local_cell_idx < nLocalCells; local_cell_idx++ )
377  {
378  int num_edges = num_edges_on_local_cells[local_cell_idx];
379  int idx_in_local_vert_arr = local_cell_idx * maxEdgesPerCell;
380  int last_vert_idx = vertices_on_local_cells[idx_in_local_vert_arr + num_edges - 1];
381  for( int i = num_edges; i < maxEdgesPerCell; i++ )
382  vertices_on_local_cells[idx_in_local_vert_arr + i] = last_vert_idx;
383  }
384 
385  std::vector< int > vertices_on_local_cells_sorted( vertices_on_local_cells );
386  std::sort( vertices_on_local_cells_sorted.begin(), vertices_on_local_cells_sorted.end() );
387  std::copy( vertices_on_local_cells_sorted.rbegin(), vertices_on_local_cells_sorted.rend(),
388  range_inserter( localGidVerts ) );
390 
391  // Read nodeCoords for local vertices
392  double* coords = new double[localGidVerts.size() * coordDim];
393  int nodeCoordVarId;
394  success = NCFUNC( inq_varid )( _fileId, "nodeCoords", &nodeCoordVarId );
395  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to get variable id of nodeCoords" );
396 #ifdef MOAB_HAVE_PNETCDF
397  size_t nb_reads = localGidVerts.psize();
398  std::vector< int > requests( nb_reads );
399  std::vector< int > statuss( nb_reads );
400  size_t idxReq = 0;
401 #endif
402  size_t indexInArray = 0;
403  for( Range::pair_iterator pair_iter = localGidVerts.pair_begin(); pair_iter != localGidVerts.pair_end();
404  ++pair_iter )
405  {
406  EntityHandle starth = pair_iter->first;
407  EntityHandle endh = pair_iter->second;
408  NCDF_SIZE read_starts[2] = { static_cast< NCDF_SIZE >( starth - 1 ), 0 };
409  NCDF_SIZE read_counts[2] = { static_cast< NCDF_SIZE >( endh - starth + 1 ),
410  static_cast< NCDF_SIZE >( coordDim ) };
411 
412  // Do a partial read in each subrange
413 #ifdef MOAB_HAVE_PNETCDF
414  success = NCFUNCREQG( _vara_double )( _fileId, nodeCoordVarId, read_starts, read_counts,
415  &coords[indexInArray], &requests[idxReq++] );
416 #else
417  success = NCFUNCAG( _vara_double )( _fileId, nodeCoordVarId, read_starts, read_counts,
418  &coords[indexInArray] );
419 #endif
420  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to read nodeCoords data in a loop" );
421 
422  // Increment the index for next subrange
423  indexInArray += ( endh - starth + 1 ) * coordDim;
424  }
425 
426 #ifdef MOAB_HAVE_PNETCDF
427  // Wait outside the loop
428  success = NCFUNC( wait_all )( _fileId, requests.size(), &requests[0], &statuss[0] );
429  if( success ) MB_SET_ERR( MB_FAILURE, "Failed on wait_all" );
430 #endif
431 
432  double* coords3d = new double[localGidVerts.size() * 3];
433  // now convert from lat/lon to 3d
434  if( 2 == coordDim )
435  {
436  // basically convert from degrees to xyz on a sphere
437  double factor = 1.;
438  if( degrees ) factor = pideg;
439  for( int i = 0; i < (int)localGidVerts.size(); i++ )
440  {
441  double lon = coords[i * 2] * factor;
442  double lat = coords[i * 2 + 1] * factor;
443  double cosphi = cos( lat );
444  coords3d[3 * i + 2] = sin( lat );
445  coords3d[3 * i] = cosphi * cos( lon );
446  coords3d[3 * i + 1] = cosphi * sin( lon );
447  }
448  }
449 
450  // find the center for each cell
451  for( int i = 0; i < nLocalCells; i++ )
452  {
453  int nv = num_edges_on_local_cells[i];
454  double x = 0., y = 0., z = 0.;
455  for( int j = 0; j < nv; j++ )
456  {
457  int vertexId = vertices_on_local_cells[i * maxEdgesPerCell + j];
458  // now find what index is in gid
459  int index = localGidVerts.index( vertexId ); // it should be a binary search!
460  x += coords3d[3 * index];
461  y += coords3d[3 * index + 1];
462  z += coords3d[3 * index + 2];
463  }
464  if( nv != 0 )
465  {
466  x /= nv;
467  y /= nv;
468  z /= nv;
469  }
470  xverts[i] = x;
471  yverts[i] = y;
472  zverts[i] = z;
473  }
474  delete[] coords3d;
475  }
476  // Zoltan partition using RCB; maybe more studies would be good, as to which partition
477  // is better
478  Interface*& mbImpl = _readNC->mbImpl;
479  DebugOutput& dbgOut = _readNC->dbgOut;
480  ZoltanPartitioner* mbZTool = new ZoltanPartitioner( mbImpl, pco, false, 0, NULL );
481  ErrorCode rval = mbZTool->repartition( xverts, yverts, zverts, start_cell_idx, "RCB", localGidCells );MB_CHK_SET_ERR( rval, "Error in Zoltan partitioning" );
482  delete mbZTool;
483 
484  dbgOut.tprintf( 1, "After Zoltan partitioning, localGidCells.psize() = %d\n", (int)localGidCells.psize() );
485  dbgOut.tprintf( 1, " localGidCells.size() = %d\n", (int)localGidCells.size() );
486 
487  // This is important: local cells are now redistributed, so nLocalCells might be different!
490  return MB_SUCCESS;
491  }
492 #endif
493 
494  // By default, apply trivial partition
495  localGidCells.insert( start_cell_idx, start_cell_idx + nLocalCells - 1 );
496 
497  return MB_SUCCESS;
498 }
499 #endif
500 
501 ErrorCode NCHelperESMF::create_local_vertices( const std::vector< int >& vertices_on_local_cells,
502  EntityHandle& start_vertex )
503 {
504  Interface*& mbImpl = _readNC->mbImpl;
505  Tag& mGlobalIdTag = _readNC->mGlobalIdTag;
506  const Tag*& mpFileIdTag = _readNC->mpFileIdTag;
507  DebugOutput& dbgOut = _readNC->dbgOut;
508 
509  // Make a copy of vertices_on_local_cells for sorting (keep original one to set cell
510  // connectivity later)
511  std::vector< int > vertices_on_local_cells_sorted( vertices_on_local_cells );
512  std::sort( vertices_on_local_cells_sorted.begin(), vertices_on_local_cells_sorted.end() );
513  std::copy( vertices_on_local_cells_sorted.rbegin(), vertices_on_local_cells_sorted.rend(),
516 
517  dbgOut.tprintf( 1, " localGidVerts.psize() = %d\n", (int)localGidVerts.psize() );
518  dbgOut.tprintf( 1, " localGidVerts.size() = %d\n", (int)localGidVerts.size() );
519 
520  // Create local vertices
521  std::vector< double* > arrays;
522  ErrorCode rval =
523  _readNC->readMeshIface->get_node_coords( 3, nLocalVertices, 0, start_vertex, arrays, nLocalVertices );MB_CHK_SET_ERR( rval, "Failed to create local vertices" );
524 
525  // Add local vertices to current file set
526  Range local_verts_range( start_vertex, start_vertex + nLocalVertices - 1 );
527  rval = _readNC->mbImpl->add_entities( _fileSet, local_verts_range );MB_CHK_SET_ERR( rval, "Failed to add local vertices to current file set" );
528 
529  // Get ptr to GID memory for local vertices
530  int count = 0;
531  void* data = NULL;
532  rval = mbImpl->tag_iterate( mGlobalIdTag, local_verts_range.begin(), local_verts_range.end(), count, data );MB_CHK_SET_ERR( rval, "Failed to iterate global id tag on local vertices" );
533  assert( count == nLocalVertices );
534  int* gid_data = (int*)data;
535  std::copy( localGidVerts.begin(), localGidVerts.end(), gid_data );
536 
537  // Duplicate GID data, which will be used to resolve sharing
538  if( mpFileIdTag )
539  {
540  rval = mbImpl->tag_iterate( *mpFileIdTag, local_verts_range.begin(), local_verts_range.end(), count, data );MB_CHK_SET_ERR( rval, "Failed to iterate file id tag on local vertices" );
541  assert( count == nLocalVertices );
542  int bytes_per_tag = 4;
543  rval = mbImpl->tag_get_bytes( *mpFileIdTag, bytes_per_tag );MB_CHK_SET_ERR( rval, "Can't get number of bytes for file id tag" );
544  if( 4 == bytes_per_tag )
545  {
546  gid_data = (int*)data;
547  std::copy( localGidVerts.begin(), localGidVerts.end(), gid_data );
548  }
549  else if( 8 == bytes_per_tag )
550  { // Should be a handle tag on 64 bit machine?
551  long* handle_tag_data = (long*)data;
552  std::copy( localGidVerts.begin(), localGidVerts.end(), handle_tag_data );
553  }
554  }
555 
556 #ifdef MOAB_HAVE_PNETCDF
557  size_t nb_reads = localGidVerts.psize();
558  std::vector< int > requests( nb_reads );
559  std::vector< int > statuss( nb_reads );
560  size_t idxReq = 0;
561 #endif
562 
563  // Read nodeCoords for local vertices
564  double* coords = new double[localGidVerts.size() * coordDim];
565  int nodeCoordVarId;
566  int success = NCFUNC( inq_varid )( _fileId, "nodeCoords", &nodeCoordVarId );
567  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to get variable id of nodeCoords" );
568  size_t indexInArray = 0;
569  for( Range::pair_iterator pair_iter = localGidVerts.pair_begin(); pair_iter != localGidVerts.pair_end();
570  ++pair_iter )
571  {
572  EntityHandle starth = pair_iter->first;
573  EntityHandle endh = pair_iter->second;
574  NCDF_SIZE read_starts[2] = { static_cast< NCDF_SIZE >( starth - 1 ), 0 };
575  NCDF_SIZE read_counts[2] = { static_cast< NCDF_SIZE >( endh - starth + 1 ),
576  static_cast< NCDF_SIZE >( coordDim ) };
577 
578  // Do a partial read in each subrange
579 #ifdef MOAB_HAVE_PNETCDF
580  success = NCFUNCREQG( _vara_double )( _fileId, nodeCoordVarId, read_starts, read_counts, &coords[indexInArray],
581  &requests[idxReq++] );
582 #else
583  success = NCFUNCAG( _vara_double )( _fileId, nodeCoordVarId, read_starts, read_counts, &coords[indexInArray] );
584 #endif
585  if( success ) MB_SET_ERR( MB_FAILURE, "Failed to read nodeCoords data in a loop" );
586 
587  // Increment the index for next subrange
588  indexInArray += ( endh - starth + 1 ) * coordDim;
589  }
590 
591 #ifdef MOAB_HAVE_PNETCDF
592  // Wait outside the loop
593  success = NCFUNC( wait_all )( _fileId, requests.size(), &requests[0], &statuss[0] );
594  if( success ) MB_SET_ERR( MB_FAILURE, "Failed on wait_all" );
595 #endif
596 
597  // now convert from lat/lon to 3d
598  if( 2 == coordDim )
599  {
600  // basically convert from degrees to xyz on a sphere
601  double factor = 1;
602  if( degrees ) factor = pideg;
603  for( int i = 0; i < (int)localGidVerts.size(); i++ )
604  {
605  double lon = coords[i * 2] * factor;
606  double lat = coords[i * 2 + 1] * factor;
607  double cosphi = cos( lat );
608  arrays[2][i] = sin( lat );
609  arrays[0][i] = cosphi * cos( lon );
610  arrays[1][i] = cosphi * sin( lon );
611  }
612  }
613  delete[] coords;
614  return MB_SUCCESS;
615 }
616 
617 ErrorCode NCHelperESMF::create_local_cells( const std::vector< int >& vertices_on_local_cells,
618  const std::vector< int >& num_edges_on_local_cells,
619  EntityHandle start_vertex,
620  Range& faces )
621 {
622  Interface*& mbImpl = _readNC->mbImpl;
623  Tag& mGlobalIdTag = _readNC->mGlobalIdTag;
624 
625  // Divide local cells into groups based on the number of edges
626  Range local_cells_with_n_edges[DEFAULT_MAX_EDGES_PER_CELL + 1];
627  // Insert larger values before smaller ones to increase efficiency
628  for( int i = nLocalCells - 1; i >= 0; i-- )
629  {
630  int num_edges = num_edges_on_local_cells[i];
631  local_cells_with_n_edges[num_edges].insert( localGidCells[i] ); // Global cell index
632  }
633 
634  std::vector< int > num_edges_on_cell_groups;
635  for( int i = 3; i <= maxEdgesPerCell; i++ )
636  {
637  if( local_cells_with_n_edges[i].size() > 0 ) num_edges_on_cell_groups.push_back( i );
638  }
639  int numCellGroups = (int)num_edges_on_cell_groups.size();
640  EntityHandle* conn_arr_local_cells_with_n_edges[DEFAULT_MAX_EDGES_PER_CELL + 1];
641  for( int i = 0; i < numCellGroups; i++ )
642  {
643  int num_edges_per_cell = num_edges_on_cell_groups[i];
644  int num_group_cells = (int)local_cells_with_n_edges[num_edges_per_cell].size();
645 
646  EntityType typeEl = MBTRI;
647  if( num_edges_per_cell == 4 ) typeEl = MBQUAD;
648  if( num_edges_per_cell > 4 ) typeEl = MBPOLYGON;
649  // Create local cells for each non-empty cell group
650  EntityHandle start_element;
651  ErrorCode rval =
652  _readNC->readMeshIface->get_element_connect( num_group_cells, num_edges_per_cell, typeEl, 0, start_element,
653  conn_arr_local_cells_with_n_edges[num_edges_per_cell],
654  num_group_cells );MB_CHK_SET_ERR( rval, "Failed to create local cells" );
655  faces.insert( start_element, start_element + num_group_cells - 1 );
656 
657  // Add local cells to current file set
658  Range local_cells_range( start_element, start_element + num_group_cells - 1 );
659  rval = _readNC->mbImpl->add_entities( _fileSet, local_cells_range );MB_CHK_SET_ERR( rval, "Failed to add local cells to current file set" );
660 
661  // Get ptr to gid memory for local cells
662  int count = 0;
663  void* data = NULL;
664  rval = mbImpl->tag_iterate( mGlobalIdTag, local_cells_range.begin(), local_cells_range.end(), count, data );MB_CHK_SET_ERR( rval, "Failed to iterate global id tag on local cells" );
665  assert( count == num_group_cells );
666  int* gid_data = (int*)data;
667  std::copy( local_cells_with_n_edges[num_edges_per_cell].begin(),
668  local_cells_with_n_edges[num_edges_per_cell].end(), gid_data );
669 
670  // Set connectivity array with proper local vertices handles
671  for( int j = 0; j < num_group_cells; j++ )
672  {
673  EntityHandle global_cell_idx =
674  local_cells_with_n_edges[num_edges_per_cell][j]; // Global cell index, 1 based
675  int local_cell_idx = localGidCells.index( global_cell_idx ); // Local cell index, 0 based
676  assert( local_cell_idx != -1 );
677 
678  for( int k = 0; k < num_edges_per_cell; k++ )
679  {
680  EntityHandle global_vert_idx =
681  vertices_on_local_cells[local_cell_idx * maxEdgesPerCell + k]; // Global vertex index, 1 based
682  int local_vert_idx = localGidVerts.index( global_vert_idx ); // Local vertex index, 0 based
683  assert( local_vert_idx != -1 );
684  conn_arr_local_cells_with_n_edges[num_edges_per_cell][j * num_edges_per_cell + k] =
685  start_vertex + local_vert_idx;
686  }
687  }
688  }
689 
690  return MB_SUCCESS;
691 }
692 
693 ErrorCode NCHelperESMF::create_padded_local_cells( const std::vector< int >& vertices_on_local_cells,
694  EntityHandle start_vertex,
695  Range& faces )
696 {
697  Interface*& mbImpl = _readNC->mbImpl;
698  Tag& mGlobalIdTag = _readNC->mGlobalIdTag;
699 
700  // Create cells for this cell group
701  EntityHandle start_element;
702  EntityHandle* conn_arr_local_cells = NULL;
704  start_element, conn_arr_local_cells, nLocalCells );MB_CHK_SET_ERR( rval, "Failed to create local cells" );
705  faces.insert( start_element, start_element + nLocalCells - 1 );
706 
707  // Add local cells to current file set
708  Range local_cells_range( start_element, start_element + nLocalCells - 1 );
709  rval = _readNC->mbImpl->add_entities( _fileSet, local_cells_range );MB_CHK_SET_ERR( rval, "Failed to add local cells to current file set" );
710 
711  // Get ptr to GID memory for local cells
712  int count = 0;
713  void* data = NULL;
714  rval = mbImpl->tag_iterate( mGlobalIdTag, local_cells_range.begin(), local_cells_range.end(), count, data );MB_CHK_SET_ERR( rval, "Failed to iterate global id tag on local cells" );
715  assert( count == nLocalCells );
716  int* gid_data = (int*)data;
717  std::copy( localGidCells.begin(), localGidCells.end(), gid_data );
718 
719  // Set connectivity array with proper local vertices handles
720  // vertices_on_local_cells array was already corrected to have the last vertices padded
721  // no need for extra checks considering
722  for( int local_cell_idx = 0; local_cell_idx < nLocalCells; local_cell_idx++ )
723  {
724  for( int i = 0; i < maxEdgesPerCell; i++ )
725  {
726  EntityHandle global_vert_idx =
727  vertices_on_local_cells[local_cell_idx * maxEdgesPerCell + i]; // Global vertex index, 1 based
728  int local_vert_idx = localGidVerts.index( global_vert_idx ); // Local vertex index, 0 based
729  assert( local_vert_idx != -1 );
730  conn_arr_local_cells[local_cell_idx * maxEdgesPerCell + i] = start_vertex + local_vert_idx;
731  }
732  }
733 
734  return MB_SUCCESS;
735 }
736 
737 } // namespace moab