// OGS — OpenGeoSys, MeshLib/IO: HdfWriter.cpp implementation file.
10#include "HdfWriter.h"
11
12#include <hdf5.h>
13
14#include <string>
15#include <utility>
16#include <vector>
17
18#include "BaseLib/Error.h"
19#include "BaseLib/Logging.h"
20#include "fileIO.h"
21template <typename... Args>
22void checkHdfStatus(const hid_t status, fmt::format_string<Args...> formatting,
23 Args&&... args)
24{
25 if (status < 0)
26 {
27 OGS_FATAL(formatting, std::forward<Args>(args)...);
28 }
29}
30
// gzip (deflate) compression level handed to H5Pset_deflate below;
// 1 = fastest / lowest compression ratio.
static unsigned short int const default_compression_factor = 1;

// Bring MeshLib::IO names (Hdf5DimType, HdfData, createFile, ...) into scope
// for the file-local helpers below.
using namespace MeshLib::IO;
34
35static bool checkCompression()
36{
37 // Check if gzip compression is available and can be used for both
38 // compression and decompression.
39 if (htri_t avail = H5Zfilter_avail(H5Z_FILTER_DEFLATE); !avail)
40 {
41 WARN("gzip filter not available.\n");
42 return false;
43 }
44 unsigned int filter_info;
45 H5Zget_filter_info(H5Z_FILTER_DEFLATE, &filter_info);
46 if (!(filter_info & H5Z_FILTER_CONFIG_ENCODE_ENABLED) ||
47 !(filter_info & H5Z_FILTER_CONFIG_DECODE_ENABLED))
48 {
49 WARN("gzip filter not available for encoding and decoding.\n");
50 return false;
51 }
52 return true;
53}
54
55static std::vector<Hdf5DimType> prependDimension(
56 Hdf5DimType const prepend_value, std::vector<Hdf5DimType> const& dimensions)
57{
58 std::vector<Hdf5DimType> dims = {prepend_value};
59 dims.insert(dims.end(), dimensions.begin(), dimensions.end());
60 return dims;
61}
62
63static hid_t createDataSet(
64 hid_t const data_type, std::vector<Hdf5DimType> const& data_dims,
65 std::vector<Hdf5DimType> const& max_dims,
66 [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
67 bool const use_compression, hid_t const section,
68 std::string const& dataset_name)
69{
70 int const time_dim_local_size = data_dims.size() + 1;
71
72 std::vector<Hdf5DimType> const time_max_dims =
73 prependDimension(H5S_UNLIMITED, max_dims);
74 std::vector<Hdf5DimType> const time_data_global_dims =
75 prependDimension(1, max_dims);
76
77 std::vector<Hdf5DimType> const time_data_chunk_dims =
78 prependDimension(1, chunk_dims);
79
80 hid_t const fspace =
81 H5Screate_simple(time_dim_local_size, time_data_global_dims.data(),
82 time_max_dims.data());
83 assert(fspace >= 0);
84
85 hid_t const dcpl = H5Pcreate(H5P_DATASET_CREATE);
86 assert(dcpl >= 0);
87
88 hid_t const status =
89 H5Pset_chunk(dcpl, chunk_dims.size() + 1, time_data_chunk_dims.data());
90 if (status < 0)
91 {
92 OGS_FATAL("H5Pset_layout failed for data set: {:s}.", dataset_name);
93 }
94
95 if (use_compression)
96 {
97 H5Pset_deflate(dcpl, default_compression_factor);
98 }
99
100 hid_t const dataset = H5Dcreate2(section, dataset_name.c_str(), data_type,
101 fspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
102
103 assert(dataset >= 0);
104 H5Pclose(dcpl);
105 assert(H5Sclose(fspace) >= 0);
106
107 return dataset;
108}
115static void writeDataSet(
116 void const* nodes_data, // what
117 hid_t const data_type,
118 std::vector<Hdf5DimType> const& data_dims, // how ...
119 std::vector<Hdf5DimType> const& offset_dims,
120 std::vector<Hdf5DimType> const& max_dims,
121 [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
122 std::string const& dataset_name, Hdf5DimType const step,
123 hid_t const dataset) // where
124{
125 Hdf5DimType const time_steps = step + 1;
126
127 std::vector<Hdf5DimType> const time_data_local_dims = data_dims;
128 std::vector<Hdf5DimType> const time_max_dims =
129 prependDimension(time_steps, max_dims);
130 std::vector<Hdf5DimType> const time_offsets =
131 prependDimension(step, offset_dims);
132 std::vector<hsize_t> const count =
133 prependDimension(1, time_data_local_dims);
134
135 hid_t const io_transfer_property = createHDF5TransferPolicy();
136
137 hid_t const mspace = H5Screate_simple(time_data_local_dims.size(),
138 time_data_local_dims.data(), NULL);
139 assert(H5Sselect_all(mspace) >= 0);
140
141 hid_t status = H5Dset_extent(dataset, time_max_dims.data());
142 if (status < 0)
143 {
144 OGS_FATAL("H5D set extent failed dataset '{:s}'.", dataset_name);
145 }
146 hid_t const fspace = H5Dget_space(dataset);
147
148 H5Sselect_hyperslab(fspace, H5S_SELECT_SET, time_offsets.data(), NULL,
149 count.data(), NULL);
150
151 status = H5Dwrite(dataset, data_type, mspace, fspace, io_transfer_property,
152 nodes_data);
153 if (status < 0)
154 {
155 OGS_FATAL("H5Dwrite failed in dataset '{:s}'.", dataset_name);
156 }
157
158 H5Sclose(mspace);
159 H5Pclose(io_transfer_property);
160
161 return;
162}
163
170static void writeTimeSeries(hid_t const file,
171 std::vector<double> const& step_times,
172 bool const is_file_manager)
173{
174 hsize_t const size = step_times.size();
175 hid_t const memspace = H5Screate_simple(1, &size, NULL);
176 hid_t const filespace = H5Screate_simple(1, &size, NULL);
177
178 if (is_file_manager)
179 {
180 H5Sselect_all(memspace);
181 H5Sselect_all(filespace);
182 }
183 else
184 {
185 H5Sselect_none(memspace);
186 H5Sselect_none(filespace);
187 }
188
189 hid_t const dataset =
190 H5Dcreate2(file, "/times", H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT,
191 H5P_DEFAULT, H5P_DEFAULT);
192
193 H5Dwrite(dataset, H5T_NATIVE_DOUBLE, memspace, filespace, H5P_DEFAULT,
194 step_times.data());
195
196 H5Dclose(dataset);
197 H5Sclose(memspace);
198 H5Sclose(filespace);
199};
200namespace MeshLib::IO
201{
203{
204 hid_t const group;
205 std::map<std::string, hid_t> const datasets;
206 std::vector<HdfData> const variable_attributes;
207};
208
// Opens/creates the HDF5 file, creates one group per mesh under "/meshes",
// creates all datasets, and writes the data for `initial_step`. Datasets for
// variable (time-dependent) attributes stay open for later writeStep() calls;
// constant-attribute datasets are closed immediately after writing.
HdfWriter::HdfWriter(std::vector<MeshHdfData> const& meshes,
                     unsigned long long const initial_step,
                     double const initial_time,
                     std::filesystem::path const& filepath,
                     bool const use_compression,
                     bool const is_file_manager,
                     unsigned int const n_files)
    : _hdf5_filepath(filepath),
      _file(createFile(filepath, n_files)),
      _meshes_group(
          H5Gcreate2(_file, "/meshes", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)),
      _step_times{initial_time},
      // Compression is only enabled if the gzip filter is actually usable.
      _use_compression(checkCompression() && use_compression),
      _is_file_manager(is_file_manager)
{
    for (auto const& mesh : meshes)
    {
        // One group per mesh, named after the mesh, below "/meshes".
        hid_t const group = H5Gcreate2(_meshes_group, mesh.name.c_str(),
                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

        // Creates the dataset for `attribute`, writes its initial data, and
        // returns the still-open dataset handle. Captures `group` and
        // `initial_step` by reference — used only within this loop iteration.
        auto const createAndWriteDataSet = [&](auto const& attribute) -> hid_t
        {
            hid_t const dataset = createDataSet(
                attribute.data_type, attribute.data_space, attribute.file_space,
                attribute.chunk_space, _use_compression, group, attribute.name);

            checkHdfStatus(dataset, "Creating HDF5 Dataset: {:s} failed.",
                           attribute.name);

            writeDataSet(attribute.data_start, attribute.data_type,
                         attribute.data_space, attribute.offsets,
                         attribute.file_space, attribute.chunk_space,
                         attribute.name, initial_step, dataset);
            return dataset;
        };

        // Constant attributes are written once and their datasets closed.
        for (auto const& attribute : mesh.constant_attributes)
        {
            hid_t const dataset = createAndWriteDataSet(attribute);
            H5Dclose(dataset);
        }

        std::map<std::string, hid_t> datasets;
        for (auto const& attribute : mesh.variable_attributes)
        {
            hid_t const dataset = createAndWriteDataSet(attribute);
            // datasets are kept open; closed in the destructor
            datasets.insert({attribute.name, dataset});
        }

        _hdf_meshes.push_back(std::make_unique<HdfMesh>(
            HdfMesh{group, datasets, mesh.variable_attributes}));
    }
}
263
265{
267
268 for (auto const& mesh : _hdf_meshes)
269 {
270 for (auto const& dataset : mesh->datasets)
271 {
272 if (auto const status = H5Dclose(dataset.second); status < 0)
273 {
274 ERR("Could not close dataset with id '{}' - status is '{}'.",
275 dataset.second, status);
276 }
277 }
278 if (auto const err = H5Gclose(mesh->group); err < 0)
279 {
280 ERR("Could not close group with group id '{}' - status is '{}'.",
281 mesh->group, err);
282 }
283 }
284 if (auto const group_err = H5Gclose(_meshes_group); group_err < 0)
285 {
286 ERR("Could not close group with group id '{}' - status is '{}'.",
287 _meshes_group, group_err);
288 }
289 if (auto const status = H5Fflush(_file, H5F_SCOPE_LOCAL); status < 0)
290 {
291 ERR("Could not flush data to file '{}' - status is '{}'.",
292 _hdf5_filepath.string(), status);
293 }
294 H5Fclose(_file);
295}
296
297void HdfWriter::writeStep(double const time)
298{
299 auto const output_step = _step_times.size();
300 _step_times.push_back(time);
301
302 for (auto const& mesh : _hdf_meshes)
303 {
304 for (auto const& attribute : mesh->variable_attributes)
305 {
306 auto const& dataset_hid = mesh->datasets.find(attribute.name);
307 if (dataset_hid == mesh->datasets.end())
308 {
309 OGS_FATAL("Writing HDF5 Dataset: '{:s}' to file '{}' failed.",
310 attribute.name, _hdf5_filepath.string());
311 }
312
314 attribute.data_start, attribute.data_type, attribute.data_space,
315 attribute.offsets, attribute.file_space, attribute.chunk_space,
316 attribute.name, output_step, mesh->datasets.at(attribute.name));
317 if (auto const flush_status = H5Fflush(_file, H5F_SCOPE_LOCAL);
318 flush_status < 0)
319 {
320 ERR("HdfWriter::writeStep(): Could not flush to file '{}' - "
321 "status is '{}'.",
322 _hdf5_filepath.string(), flush_status);
323 }
324 }
325 }
326}
327} // namespace MeshLib::IO
#define OGS_FATAL(...)
Definition Error.h:26
static void writeDataSet(void const *nodes_data, hid_t const data_type, std::vector< Hdf5DimType > const &data_dims, std::vector< Hdf5DimType > const &offset_dims, std::vector< Hdf5DimType > const &max_dims, std::vector< Hdf5DimType > const &chunk_dims, std::string const &dataset_name, Hdf5DimType const step, hid_t const dataset)
Assumes a dataset is already opened by createDatasetFunction.
static void writeTimeSeries(hid_t const file, std::vector< double > const &step_times, bool const is_file_manager)
Write vector with time values into open hdf file.
void checkHdfStatus(const hid_t status, fmt::format_string< Args... > formatting, Args &&... args)
Definition HdfWriter.cpp:22
static hid_t createDataSet(hid_t const data_type, std::vector< Hdf5DimType > const &data_dims, std::vector< Hdf5DimType > const &max_dims, std::vector< Hdf5DimType > const &chunk_dims, bool const use_compression, hid_t const section, std::string const &dataset_name)
Definition HdfWriter.cpp:63
static bool checkCompression()
Definition HdfWriter.cpp:35
static unsigned short int const default_compression_factor
Definition HdfWriter.cpp:31
static std::vector< Hdf5DimType > prependDimension(Hdf5DimType const prepend_value, std::vector< Hdf5DimType > const &dimensions)
Definition HdfWriter.cpp:55
Writes vectorized data to HDF File.
void ERR(fmt::format_string< Args... > fmt, Args &&... args)
Definition Logging.h:45
void WARN(fmt::format_string< Args... > fmt, Args &&... args)
Definition Logging.h:40
bool const _use_compression
Definition HdfWriter.h:76
std::filesystem::path const _hdf5_filepath
Definition HdfWriter.h:71
std::vector< std::unique_ptr< HdfMesh > > _hdf_meshes
Definition HdfWriter.h:74
bool const _is_file_manager
Definition HdfWriter.h:77
hid_t const _meshes_group
Definition HdfWriter.h:73
void writeStep(double time)
Writes attributes. The data itself is hold by a structure outside of this class. The writer assumes t...
HdfWriter(std::vector< MeshHdfData > const &meshes, unsigned long long initial_step, double initial_time, std::filesystem::path const &filepath, bool use_compression, bool is_file_manager, unsigned int n_files)
Write file with geometry and topology data. The data itself is held by a structure outside of this cl...
std::vector< double > _step_times
Definition HdfWriter.h:75
Dispatches HDF5 functions specific to execution platform (w/o MPI). There are multiple implementation...
hsize_t Hdf5DimType
Definition HdfData.h:24
int64_t createHDF5TransferPolicy()
Definition fileIO.cpp:70
int64_t createFile(std::filesystem::path const &filepath, unsigned int n_files)
Definition fileIO.cpp:38
std::map< std::string, hid_t > const datasets
std::vector< HdfData > const variable_attributes