HdfWriter.cpp
#include "HdfWriter.h"

#include <hdf5.h>

#include <cassert>
#include <map>
#include <string>
#include <utility>
#include <vector>

#include "BaseLib/Error.h"
#include "BaseLib/Logging.h"
#include "fileIO.h"
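
// Forwards any failed HDF5 call (negative status or invalid handle) to
// OGS_FATAL with a formatted error message.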
template <typename... Args>
void checkHdfStatus(const hid_t status, std::string const& formatting,
                    Args&&... args)
{
    if (status < 0)
    {
        OGS_FATAL(formatting, std::forward<Args>(args)...);
    }
}

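// Deflate (gzip) level handed to H5Pset_deflate when compression is enabled;
// level 1 favours speed over compression ratio.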
static unsigned short int const default_compression_factor = 1;

using namespace MeshLib::IO;

using namespace std::string_literals;

static bool checkCompression()
{
    // Check if gzip compression is available and can be used for both
    // compression and decompression.
    if (htri_t avail = H5Zfilter_avail(H5Z_FILTER_DEFLATE); !avail)
    {
        WARN("gzip filter not available.\n");
        return false;
    }
    unsigned int filter_info;
    H5Zget_filter_info(H5Z_FILTER_DEFLATE, &filter_info);
    if (!(filter_info & H5Z_FILTER_CONFIG_ENCODE_ENABLED) ||
        !(filter_info & H5Z_FILTER_CONFIG_DECODE_ENABLED))
    {
        WARN("gzip filter not available for encoding and decoding.\n");
        return false;
    }
    return true;
}

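// Returns a copy of 'dimensions' with 'prepend_value' added as the new
// leading (slowest varying) dimension; used below to add the time dimension.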
static std::vector<Hdf5DimType> prependDimension(
    Hdf5DimType const prepend_value, std::vector<Hdf5DimType> const& dimensions)
{
    std::vector<Hdf5DimType> dims = {prepend_value};
    dims.insert(dims.end(), dimensions.begin(), dimensions.end());
    return dims;
}

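// Creates a chunked, optionally compressed dataset whose first dimension is
// time: the on-file extent starts with a single time step and may grow
// without bound (H5S_UNLIMITED), so that later steps can be appended.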
static hid_t createDataSet(
    hid_t const data_type, std::vector<Hdf5DimType> const& data_dims,
    std::vector<Hdf5DimType> const& max_dims,
    [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
    bool const use_compression, hid_t const section,
    std::string const& dataset_name)
{
    int const time_dim_local_size = data_dims.size() + 1;

    std::vector<Hdf5DimType> const time_max_dims =
        prependDimension(H5S_UNLIMITED, max_dims);
    std::vector<Hdf5DimType> const time_data_global_dims =
        prependDimension(1, max_dims);

    std::vector<Hdf5DimType> const time_data_chunk_dims =
        prependDimension(1, chunk_dims);

    hid_t const fspace =
        H5Screate_simple(time_dim_local_size, time_data_global_dims.data(),
                         time_max_dims.data());
    assert(fspace >= 0);

    hid_t const dcpl = H5Pcreate(H5P_DATASET_CREATE);
    assert(dcpl >= 0);

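    // HDF5 requires a chunked layout for datasets with an unlimited
    // dimension; the chunk shape mirrors the data shape with a time
    // dimension of 1.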
    herr_t const status =
        H5Pset_chunk(dcpl, chunk_dims.size() + 1, time_data_chunk_dims.data());
    if (status < 0)
    {
        OGS_FATAL("H5Pset_chunk failed for data set: {:s}.", dataset_name);
    }

    if (use_compression)
    {
        H5Pset_deflate(dcpl, default_compression_factor);
    }

    hid_t const dataset = H5Dcreate2(section, dataset_name.c_str(), data_type,
                                     fspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);

    assert(dataset >= 0);
    H5Pclose(dcpl);
    H5Sclose(fspace);

    return dataset;
}
//! Assumes a dataset has already been opened by createDataSet.
static void writeDataSet(
    void const* nodes_data,  // what
    hid_t const data_type,
    std::vector<Hdf5DimType> const& data_dims,  // how ...
    std::vector<Hdf5DimType> const& offset_dims,
    std::vector<Hdf5DimType> const& max_dims,
    [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
    std::string const& dataset_name, Hdf5DimType const step,
    hid_t const dataset)  // where
{
    Hdf5DimType const time_steps = step + 1;

    std::vector<Hdf5DimType> const time_data_local_dims = data_dims;
    std::vector<Hdf5DimType> const time_max_dims =
        prependDimension(time_steps, max_dims);
    std::vector<Hdf5DimType> const time_offsets =
        prependDimension(step, offset_dims);
    std::vector<hsize_t> const count =
        prependDimension(1, time_data_local_dims);

    hid_t const io_transfer_property = createHDF5TransferPolicy();

    hid_t const mspace = H5Screate_simple(time_data_local_dims.size(),
                                          time_data_local_dims.data(), NULL);
    H5Sselect_all(mspace);

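    // Grow the dataset's time dimension to hold 'step + 1' entries, then
    // select the hyperslab that corresponds to this step and write into it.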
    herr_t status = H5Dset_extent(dataset, time_max_dims.data());
    if (status < 0)
    {
        OGS_FATAL("H5Dset_extent failed for dataset '{:s}'.", dataset_name);
    }
    hid_t const fspace = H5Dget_space(dataset);

    H5Sselect_hyperslab(fspace, H5S_SELECT_SET, time_offsets.data(), NULL,
                        count.data(), NULL);

    status = H5Dwrite(dataset, data_type, mspace, fspace, io_transfer_property,
                      nodes_data);
    if (status < 0)
    {
        OGS_FATAL("H5Dwrite failed in dataset '{:s}'.", dataset_name);
    }

    H5Sclose(fspace);
    H5Sclose(mspace);
    H5Pclose(io_transfer_property);
}

//! Write the vector of step times into the open HDF5 file.
static void writeTimeSeries(hid_t const file,
                            std::vector<double> const& step_times,
                            bool const is_file_manager)
{
    hsize_t const size = step_times.size();
    hid_t const memspace = H5Screate_simple(1, &size, NULL);
    hid_t const filespace = H5Screate_simple(1, &size, NULL);

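    // Only the file-managing rank contributes the /times data; all other
    // ranks select nothing and therefore write nothing.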
    if (is_file_manager)
    {
        H5Sselect_all(memspace);
        H5Sselect_all(filespace);
    }
    else
    {
        H5Sselect_none(memspace);
        H5Sselect_none(filespace);
    }

    hid_t const dataset =
        H5Dcreate2(file, "/times", H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT,
                   H5P_DEFAULT, H5P_DEFAULT);

    H5Dwrite(dataset, H5T_NATIVE_DOUBLE, memspace, filespace, H5P_DEFAULT,
             step_times.data());

    H5Dclose(dataset);
    H5Sclose(memspace);
    H5Sclose(filespace);
}

namespace MeshLib::IO
{
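// Per-mesh bookkeeping: the mesh's HDF5 group, the datasets that stay open
// for time-dependent (variable) attributes, and the attribute descriptions
// needed to write subsequent steps.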
struct HdfWriter::HdfMesh final
{
    hid_t const group;
    std::string const name;
    std::map<std::string, hid_t> const datasets;
    std::vector<HdfData> const variable_attributes;
};

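// Creates the file layout: one group per mesh under /meshes and one dataset
// per attribute. All attributes are written once for the initial step;
// datasets of constant attributes are closed again, those of variable
// attributes are kept open for later writeStep() calls.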
HdfWriter::HdfWriter(std::vector<MeshHdfData> meshes,
                     unsigned long long const initial_step,
                     std::filesystem::path const& filepath,
                     bool const use_compression,
                     bool const is_file_manager,
                     unsigned int const n_files)
    : _hdf5_filepath(filepath),
      _file(createFile(filepath, n_files)),
      _meshes_group(
          H5Gcreate2(_file, "/meshes", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)),
      _step_times{0},  // TODO: needs to be the initial time
      _use_compression(checkCompression() && use_compression),
      _is_file_manager(is_file_manager)
{
    for (auto const& mesh : meshes)
    {
        hid_t const group = H5Gcreate2(_meshes_group, mesh.name.c_str(),
                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

        auto const createAndWriteDataSet = [&](auto const& attribute) -> hid_t
        {
            hid_t const dataset = createDataSet(
                attribute.data_type, attribute.data_space, attribute.file_space,
                attribute.chunk_space, _use_compression, group, attribute.name);

            checkHdfStatus(dataset, "Creating HDF5 Dataset: {:s} failed.",
                           attribute.name);

            writeDataSet(attribute.data_start, attribute.data_type,
                         attribute.data_space, attribute.offsets,
                         attribute.file_space, attribute.chunk_space,
                         attribute.name, initial_step, dataset);
            return dataset;
        };

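        // Constant attributes are written only once, here; their datasets can
        // be closed immediately.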
        for (auto const& attribute : mesh.constant_attributes)
        {
            hid_t const dataset = createAndWriteDataSet(attribute);
            H5Dclose(dataset);
        }

        std::map<std::string, hid_t> datasets;
        for (auto const& attribute : mesh.variable_attributes)
        {
            hid_t const dataset = createAndWriteDataSet(attribute);
            // datasets are kept open
            datasets.insert({attribute.name, dataset});
        }

        _hdf_meshes.push_back(std::make_unique<HdfMesh>(
            HdfMesh{group, mesh.name, datasets, mesh.variable_attributes}));
    }
}
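
// The destructor finalizes the file: it writes the collected step times and
// closes all datasets, groups and the file itself.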
HdfWriter::~HdfWriter()
{
    writeTimeSeries(_file, _step_times, _is_file_manager);

    for (auto const& mesh : _hdf_meshes)
    {
        for (auto const& dataset : mesh->datasets)
        {
            H5Dclose(dataset.second);
        }
        H5Gclose(mesh->group);
    }
    H5Gclose(_meshes_group);
    H5Fclose(_file);
}

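// Appends one time step: records the step time and writes every variable
// attribute of every mesh into the time slab reserved for this step.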
void HdfWriter::writeStep(double const time)
{
    auto const output_step = _step_times.size();
    _step_times.push_back(time);

    for (auto const& mesh : _hdf_meshes)
    {
        for (auto const& attribute : mesh->variable_attributes)
        {
            auto const dataset_hid = mesh->datasets.find(attribute.name);
            if (dataset_hid == mesh->datasets.end())
            {
                OGS_FATAL("No open HDF5 dataset '{:s}' found for writing.",
                          attribute.name);
            }

            writeDataSet(
                attribute.data_start, attribute.data_type, attribute.data_space,
                attribute.offsets, attribute.file_space, attribute.chunk_space,
                attribute.name, output_step, dataset_hid->second);
        }
    }
}
}  // namespace MeshLib::IO
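
// Usage sketch (illustrative only; how the MeshHdfData descriptions are built
// is outside this file, and the file name below is a placeholder):
//
//   std::vector<MeshLib::IO::MeshHdfData> meshes = /* filled by the caller */;
//   MeshLib::IO::HdfWriter writer(std::move(meshes), /*initial_step=*/0,
//                                 "simulation.h5", /*use_compression=*/true,
//                                 /*is_file_manager=*/true, /*n_files=*/1);
//   writer.writeStep(0.1);  // appends all variable attributes for t = 0.1
//   // remaining step times and open handles are written/closed on destruction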