Skip to content

Commit

Permalink
add logic to bp plugin to limit max hdf5 file handles open (#19043)
Browse files Browse the repository at this point in the history
* add logic to bp plugin to limit max hdf5 file handles open

* update release notes
  • Loading branch information
cyrush authored Nov 10, 2023
1 parent db9ecdd commit 5b705f1
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 3 deletions.
36 changes: 33 additions & 3 deletions src/databases/Blueprint/avtBlueprintTreeCache.C
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,11 @@ using namespace mfem;
///
/// avtBlueprintTreeCache::CacheMap Interface
///
//
// Modifications:
// Cyrus Harrison, Tue Nov 7 15:35:20 PST 2023
// Add logic to limit max number of file handles held open
//
//----------------------------------------------------------------------------/
class avtBlueprintTreeCache::CacheMap
{
Expand All @@ -59,13 +64,17 @@ class avtBlueprintTreeCache::CacheMap

uint64 TotalSize() const;
uint64 TotalHDF5Ids() const;

void CloseHDF5FileHandle(hid_t h5_file_id);

void Release();

private:
std::map<int,Node> m_nodes;
std::map<int,Node> m_sidre_nodes;
int m_max_file_handles;
std::map<std::string,hid_t> m_h5_ids;
std::vector<std::string> m_h5_path_open_order;

};

Expand All @@ -79,6 +88,7 @@ class avtBlueprintTreeCache::CacheMap
// CacheMap constructor.
// Starts with no cached conduit nodes and no open HDF5 file handles.
// m_max_file_handles caps how many HDF5 file handles are held open at
// once (see FetchHDF5Id); 256 is the default limit.
avtBlueprintTreeCache::CacheMap::CacheMap()
: m_nodes(),
  m_sidre_nodes(),
  m_max_file_handles(256),
  m_h5_ids(),
  // explicitly value-initialized for consistency with the other members;
  // records the order files were opened so the oldest handle is closed first
  m_h5_path_open_order()
{}

Expand All @@ -104,12 +114,11 @@ avtBlueprintTreeCache::CacheMap::Release()
{

hid_t h5_file_id = (*itr).second;
// close the hdf5 file
CHECK_HDF5_ERROR(H5Fclose(h5_file_id),
"Error closing HDF5 file handle: " << h5_file_id);
CloseHDF5FileHandle(h5_file_id);
}

m_h5_ids.clear();
m_h5_path_open_order.clear();

}

Expand All @@ -132,8 +141,21 @@ hid_t
avtBlueprintTreeCache::CacheMap::FetchHDF5Id(const std::string &file_path)
{
hid_t h5_file_id = -1;

// check if handle is open
if ( m_h5_ids.find(file_path) == m_h5_ids.end() )
{
// handle is not open, check if we are at max number of handles
if( TotalHDF5Ids() > m_max_file_handles )
{
// take the first entry and close it
std::string h5_file_path_to_close = m_h5_path_open_order[0];
hid_t h5_id_to_close = m_h5_ids[h5_file_path_to_close];
CloseHDF5FileHandle(h5_id_to_close);
m_h5_path_open_order.erase(m_h5_path_open_order.begin());
m_h5_ids.erase(h5_file_path_to_close);
}

// assume fetch_path points to a hdf5 dataset
// open the hdf5 file for reading
h5_file_id = H5Fopen(file_path.c_str(),
Expand All @@ -142,6 +164,7 @@ avtBlueprintTreeCache::CacheMap::FetchHDF5Id(const std::string &file_path)
CHECK_HDF5_ERROR(h5_file_id,
"Error opening HDF5 file for reading: " << file_path);
BP_PLUGIN_INFO("opened " << file_path << " hdf5 id = " << h5_file_id);
m_h5_path_open_order.push_back(file_path);
m_h5_ids[file_path] = h5_file_id;
}
else
Expand All @@ -152,6 +175,13 @@ avtBlueprintTreeCache::CacheMap::FetchHDF5Id(const std::string &file_path)
return h5_file_id;
}

//----------------------------------------------------------------------------/
// Closes a single HDF5 file handle previously opened by FetchHDF5Id().
// On a failed close, CHECK_HDF5_ERROR is expected to report an error that
// includes the numeric handle id (see the macro's definition for exact
// behavior).
// NOTE(review): this method only closes the handle -- callers remove the
// matching entries from m_h5_ids / m_h5_path_open_order themselves, as
// FetchHDF5Id() and Release() do.
void avtBlueprintTreeCache::CacheMap::CloseHDF5FileHandle(hid_t h5_file_id)
{
// close the hdf5 file
CHECK_HDF5_ERROR(H5Fclose(h5_file_id),
"Error closing HDF5 file handle: " << h5_file_id);
}

//----------------------------------------------------------------------------/
uint64
Expand Down
2 changes: 2 additions & 0 deletions src/resources/help/en_US/relnotes3.4.0.html
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,8 @@
<li>Removed the HDF 4 support in the Enzo reader.</li>
<li>VisIt's Blueprint reader now supports materials with material numbers that do NOT fall in the range [0, N), where N is the number of materials.</li>
<li>VisIt's Blueprint reader now detects high-order volume fractions fields following the naming pattern <i>volume_fraction_ZZZ</i> as a material.</li>
<li>VisIt's Blueprint reader now limits the total number of open HDF5 file handles.</li>

</ul>

<a name="Plot_changes"></a>
Expand Down

0 comments on commit 5b705f1

Please sign in to comment.