HdfWriter.cpp
#include "HdfWriter.h"

#include <hdf5.h>

#include <cassert>
#include <map>
#include <string>
#include <utility>
#include <vector>

#include "BaseLib/Error.h"
#include "BaseLib/Logging.h"
#include "fileIO.h"
template <typename... Args>
void checkHdfStatus(const hid_t status, fmt::format_string<Args...> formatting,
                    Args&&... args)
{
    if (status < 0)
    {
        OGS_FATAL(formatting, std::forward<Args>(args)...);
    }
}

static unsigned short int const default_compression_factor = 1;

using namespace MeshLib::IO;

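// Returns true if the HDF5 gzip (deflate) filter is available and usable for
// both encoding and decoding; otherwise logs a warning and returns false.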
static bool checkCompression()
{
    // Check if gzip compression is available and can be used for both
    // compression and decompression.
    if (htri_t avail = H5Zfilter_avail(H5Z_FILTER_DEFLATE); !avail)
    {
        WARN("gzip filter not available.\n");
        return false;
    }
    unsigned int filter_info;
    H5Zget_filter_info(H5Z_FILTER_DEFLATE, &filter_info);
    if (!(filter_info & H5Z_FILTER_CONFIG_ENCODE_ENABLED) ||
        !(filter_info & H5Z_FILTER_CONFIG_DECODE_ENABLED))
    {
        WARN("gzip filter not available for encoding and decoding.\n");
        return false;
    }
    return true;
}

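// Returns a copy of 'dimensions' with 'prepend_value' inserted as the new
// leading (slowest varying) dimension.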
static std::vector<Hdf5DimType> prependDimension(
    Hdf5DimType const prepend_value, std::vector<Hdf5DimType> const& dimensions)
{
    std::vector<Hdf5DimType> dims = {prepend_value};
    dims.insert(dims.end(), dimensions.begin(), dimensions.end());
    return dims;
}

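// Creates a chunked dataset whose leading time dimension is unlimited, so
// additional time steps can be appended later; the initial extent holds a
// single time step. Gzip compression is enabled when requested.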
static hid_t createDataSet(
    hid_t const data_type, std::vector<Hdf5DimType> const& data_dims,
    std::vector<Hdf5DimType> const& max_dims,
    std::vector<Hdf5DimType> const& chunk_dims,
    bool const use_compression, hid_t const section,
    std::string const& dataset_name)
{
    int const time_dim_local_size = data_dims.size() + 1;

    std::vector<Hdf5DimType> const time_max_dims =
        prependDimension(H5S_UNLIMITED, max_dims);
    std::vector<Hdf5DimType> const time_data_global_dims =
        prependDimension(1, max_dims);

    std::vector<Hdf5DimType> const time_data_chunk_dims =
        prependDimension(1, chunk_dims);

    hid_t const fspace =
        H5Screate_simple(time_dim_local_size, time_data_global_dims.data(),
                         time_max_dims.data());
    assert(fspace >= 0);

    hid_t const dcpl = H5Pcreate(H5P_DATASET_CREATE);
    assert(dcpl >= 0);

    hid_t const status =
        H5Pset_chunk(dcpl, chunk_dims.size() + 1, time_data_chunk_dims.data());
    if (status < 0)
    {
        OGS_FATAL("H5Pset_chunk failed for data set: {:s}.", dataset_name);
    }

    if (use_compression)
    {
        H5Pset_deflate(dcpl, default_compression_factor);
    }

    hid_t const dataset = H5Dcreate2(section, dataset_name.c_str(), data_type,
                                     fspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);

    assert(dataset >= 0);
    H5Pclose(dcpl);
    H5Sclose(fspace);

    return dataset;
}
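
/**
 * Assumes the dataset has already been created by createDataSet. Extends the
 * dataset to 'step + 1' time steps and writes the given buffer into the
 * hyperslab belonging to 'step'.
 */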
static void writeDataSet(
    void const* nodes_data,  // what
    hid_t const data_type,
    std::vector<Hdf5DimType> const& data_dims,  // how ...
    std::vector<Hdf5DimType> const& offset_dims,
    std::vector<Hdf5DimType> const& max_dims,
    [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
    std::string const& dataset_name, Hdf5DimType const step,
    hid_t const dataset)  // where
{
    Hdf5DimType const time_steps = step + 1;

    std::vector<Hdf5DimType> const time_data_local_dims = data_dims;
    std::vector<Hdf5DimType> const time_max_dims =
        prependDimension(time_steps, max_dims);
    std::vector<Hdf5DimType> const time_offsets =
        prependDimension(step, offset_dims);
    std::vector<hsize_t> const count =
        prependDimension(1, time_data_local_dims);

    hid_t const io_transfer_property = createHDF5TransferPolicy();

    hid_t const mspace = H5Screate_simple(time_data_local_dims.size(),
                                          time_data_local_dims.data(), NULL);
    H5Sselect_all(mspace);

    hid_t status = H5Dset_extent(dataset, time_max_dims.data());
    if (status < 0)
    {
        OGS_FATAL("H5Dset_extent failed for dataset '{:s}'.", dataset_name);
    }
    hid_t const fspace = H5Dget_space(dataset);

    H5Sselect_hyperslab(fspace, H5S_SELECT_SET, time_offsets.data(), NULL,
                        count.data(), NULL);

    status = H5Dwrite(dataset, data_type, mspace, fspace, io_transfer_property,
                      nodes_data);
    if (status < 0)
    {
        OGS_FATAL("H5Dwrite failed in dataset '{:s}'.", dataset_name);
    }

    H5Sclose(fspace);
    H5Sclose(mspace);
    H5Pclose(io_transfer_property);
}

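/// Writes the vector of time-step values into the open HDF5 file under
/// "/times". Only the file-managing rank selects data for writing; all other
/// ranks make an empty selection.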
static void writeTimeSeries(hid_t const file,
                            std::vector<double> const& step_times,
                            bool const is_file_manager)
{
    hsize_t const size = step_times.size();
    hid_t const memspace = H5Screate_simple(1, &size, NULL);
    hid_t const filespace = H5Screate_simple(1, &size, NULL);

    if (is_file_manager)
    {
        H5Sselect_all(memspace);
        H5Sselect_all(filespace);
    }
    else
    {
        H5Sselect_none(memspace);
        H5Sselect_none(filespace);
    }

    hid_t const dataset =
        H5Dcreate2(file, "/times", H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT,
                   H5P_DEFAULT, H5P_DEFAULT);

    H5Dwrite(dataset, H5T_NATIVE_DOUBLE, memspace, filespace, H5P_DEFAULT,
             step_times.data());

    H5Dclose(dataset);
    H5Sclose(memspace);
    H5Sclose(filespace);
}

namespace MeshLib::IO
{
struct HdfWriter::HdfMesh final
{
    hid_t const group;
    std::map<std::string, hid_t> const datasets;
    std::vector<HdfData> const variable_attributes;
};

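// The constructor creates the HDF5 file and the "/meshes" group, then creates
// and writes one dataset per mesh attribute. Datasets of variable (per time
// step) attributes are kept open for subsequent writeStep() calls.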
HdfWriter::HdfWriter(std::vector<MeshHdfData> const& meshes,
                     unsigned long long const initial_step,
                     std::filesystem::path const& filepath,
                     bool const use_compression,
                     bool const is_file_manager,
                     unsigned int const n_files)
    : _hdf5_filepath(filepath),
      _file(createFile(filepath, n_files)),
      _meshes_group(
          H5Gcreate2(_file, "/meshes", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)),
      _step_times{0},  // TODO: should be the initial time
      _use_compression(checkCompression() && use_compression),
      _is_file_manager(is_file_manager)
{
    for (auto const& mesh : meshes)
    {
        hid_t const group = H5Gcreate2(_meshes_group, mesh.name.c_str(),
                                       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

        auto const createAndWriteDataSet = [&](auto const& attribute) -> hid_t
        {
            hid_t const dataset = createDataSet(
                attribute.data_type, attribute.data_space, attribute.file_space,
                attribute.chunk_space, _use_compression, group, attribute.name);

            checkHdfStatus(dataset, "Creating HDF5 Dataset: {:s} failed.",
                           attribute.name);

            writeDataSet(attribute.data_start, attribute.data_type,
                         attribute.data_space, attribute.offsets,
                         attribute.file_space, attribute.chunk_space,
                         attribute.name, initial_step, dataset);
            return dataset;
        };

        for (auto const& attribute : mesh.constant_attributes)
        {
            hid_t const dataset = createAndWriteDataSet(attribute);
            H5Dclose(dataset);
        }

        std::map<std::string, hid_t> datasets;
        for (auto const& attribute : mesh.variable_attributes)
        {
            hid_t const dataset = createAndWriteDataSet(attribute);
            // datasets are kept open
            datasets.insert({attribute.name, dataset});
        }

        _hdf_meshes.push_back(std::make_unique<HdfMesh>(
            HdfMesh{group, datasets, mesh.variable_attributes}));
    }
}

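// The destructor writes the collected step times and releases all HDF5
// handles that were kept open (datasets, mesh groups, the "/meshes" group,
// and the file itself).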
HdfWriter::~HdfWriter()
{
    writeTimeSeries(_file, _step_times, _is_file_manager);

    for (auto const& mesh : _hdf_meshes)
    {
        for (auto const& dataset : mesh->datasets)
        {
            H5Dclose(dataset.second);
        }
        H5Gclose(mesh->group);
    }
    H5Gclose(_meshes_group);
    H5Fclose(_file);
}

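// Appends the given time to the recorded step times and writes every variable
// attribute of every mesh into its still-open dataset for the new step.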
void HdfWriter::writeStep(double const time)
{
    auto const output_step = _step_times.size();
    _step_times.push_back(time);

    for (auto const& mesh : _hdf_meshes)
    {
        for (auto const& attribute : mesh->variable_attributes)
        {
            auto const& dataset_hid = mesh->datasets.find(attribute.name);
            if (dataset_hid == mesh->datasets.end())
            {
                OGS_FATAL("Writing HDF5 Dataset: {:s} failed.", attribute.name);
            }

            writeDataSet(
                attribute.data_start, attribute.data_type, attribute.data_space,
                attribute.offsets, attribute.file_space, attribute.chunk_space,
                attribute.name, output_step, mesh->datasets.at(attribute.name));
        }
    }
}
}  // namespace MeshLib::IO