OGS
HdfWriter.cpp
Go to the documentation of this file.
1// SPDX-FileCopyrightText: Copyright (c) OpenGeoSys Community (opengeosys.org)
2// SPDX-License-Identifier: BSD-3-Clause
3
4#include "HdfWriter.h"
5
6#include <hdf5.h>
7
8#include <string>
9#include <utility>
10#include <vector>
11
12#include "BaseLib/Error.h"
13#include "BaseLib/Logging.h"
14#include "fileIO.h"
15template <typename... Args>
16void checkHdfStatus(const hid_t status, fmt::format_string<Args...> formatting,
17 Args&&... args)
18{
19 if (status < 0)
20 {
21 OGS_FATAL(formatting, std::forward<Args>(args)...);
22 }
23}
24
// gzip compression level handed to H5Pset_deflate() in createDataSet();
// level 1 is the fastest gzip setting (lowest compression ratio).
static unsigned short int const default_compression_factor = 1;

// Brings MeshLib::IO names (Hdf5DimType, createFile, createHDF5TransferPolicy,
// ...) into scope for the file-local helpers below.
using namespace MeshLib::IO;
28
29static bool checkCompression()
30{
31 // Check if gzip compression is available and can be used for both
32 // compression and decompression.
33 if (htri_t avail = H5Zfilter_avail(H5Z_FILTER_DEFLATE); !avail)
34 {
35 WARN("gzip filter not available.\n");
36 return false;
37 }
38 unsigned int filter_info;
39 H5Zget_filter_info(H5Z_FILTER_DEFLATE, &filter_info);
40 if (!(filter_info & H5Z_FILTER_CONFIG_ENCODE_ENABLED) ||
41 !(filter_info & H5Z_FILTER_CONFIG_DECODE_ENABLED))
42 {
43 WARN("gzip filter not available for encoding and decoding.\n");
44 return false;
45 }
46 return true;
47}
48
49static std::vector<Hdf5DimType> prependDimension(
50 Hdf5DimType const prepend_value, std::vector<Hdf5DimType> const& dimensions)
51{
52 std::vector<Hdf5DimType> dims = {prepend_value};
53 dims.insert(dims.end(), dimensions.begin(), dimensions.end());
54 return dims;
55}
56
57static hid_t createDataSet(
58 hid_t const data_type, std::vector<Hdf5DimType> const& data_dims,
59 std::vector<Hdf5DimType> const& max_dims,
60 [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
61 bool const use_compression, hid_t const section,
62 std::string const& dataset_name)
63{
64 int const time_dim_local_size = data_dims.size() + 1;
65
66 std::vector<Hdf5DimType> const time_max_dims =
67 prependDimension(H5S_UNLIMITED, max_dims);
68 std::vector<Hdf5DimType> const time_data_global_dims =
69 prependDimension(1, max_dims);
70
71 std::vector<Hdf5DimType> const time_data_chunk_dims =
72 prependDimension(1, chunk_dims);
73
74 hid_t const fspace =
75 H5Screate_simple(time_dim_local_size, time_data_global_dims.data(),
76 time_max_dims.data());
77 assert(fspace >= 0);
78
79 hid_t const dcpl = H5Pcreate(H5P_DATASET_CREATE);
80 assert(dcpl >= 0);
81
82 hid_t const status =
83 H5Pset_chunk(dcpl, chunk_dims.size() + 1, time_data_chunk_dims.data());
84 if (status < 0)
85 {
86 OGS_FATAL("H5Pset_layout failed for data set: {:s}.", dataset_name);
87 }
88
89 if (use_compression)
90 {
91 H5Pset_deflate(dcpl, default_compression_factor);
92 }
93
94 hid_t const dataset = H5Dcreate2(section, dataset_name.c_str(), data_type,
95 fspace, H5P_DEFAULT, dcpl, H5P_DEFAULT);
96
97 assert(dataset >= 0);
98 H5Pclose(dcpl);
99 assert(H5Sclose(fspace) >= 0);
100
101 return dataset;
102}
103
109static void writeDataSet(
110 void const* nodes_data, // what
111 hid_t const data_type,
112 std::vector<Hdf5DimType> const& data_dims, // how ...
113 std::vector<Hdf5DimType> const& offset_dims,
114 std::vector<Hdf5DimType> const& max_dims,
115 [[maybe_unused]] std::vector<Hdf5DimType> const& chunk_dims,
116 std::string const& dataset_name, Hdf5DimType const step,
117 hid_t const dataset) // where
118{
119 Hdf5DimType const time_steps = step + 1;
120
121 std::vector<Hdf5DimType> const time_data_local_dims = data_dims;
122 std::vector<Hdf5DimType> const time_max_dims =
123 prependDimension(time_steps, max_dims);
124 std::vector<Hdf5DimType> const time_offsets =
125 prependDimension(step, offset_dims);
126 std::vector<hsize_t> const count =
127 prependDimension(1, time_data_local_dims);
128
129 hid_t const io_transfer_property = createHDF5TransferPolicy();
130
131 hid_t const mspace = H5Screate_simple(time_data_local_dims.size(),
132 time_data_local_dims.data(), NULL);
133 assert(H5Sselect_all(mspace) >= 0);
134
135 hid_t status = H5Dset_extent(dataset, time_max_dims.data());
136 if (status < 0)
137 {
138 OGS_FATAL("H5D set extent failed dataset '{:s}'.", dataset_name);
139 }
140 hid_t const fspace = H5Dget_space(dataset);
141
142 H5Sselect_hyperslab(fspace, H5S_SELECT_SET, time_offsets.data(), NULL,
143 count.data(), NULL);
144
145 status = H5Dwrite(dataset, data_type, mspace, fspace, io_transfer_property,
146 nodes_data);
147 if (status < 0)
148 {
149 OGS_FATAL("H5Dwrite failed in dataset '{:s}'.", dataset_name);
150 }
151
152 H5Sclose(mspace);
153 H5Pclose(io_transfer_property);
154
155 return;
156}
157
164static void writeTimeSeries(hid_t const file,
165 std::vector<double> const& step_times,
166 bool const is_file_manager)
167{
168 hsize_t const size = step_times.size();
169 hid_t const memspace = H5Screate_simple(1, &size, NULL);
170 hid_t const filespace = H5Screate_simple(1, &size, NULL);
171
172 if (is_file_manager)
173 {
174 H5Sselect_all(memspace);
175 H5Sselect_all(filespace);
176 }
177 else
178 {
179 H5Sselect_none(memspace);
180 H5Sselect_none(filespace);
181 }
182
183 hid_t const dataset =
184 H5Dcreate2(file, "/times", H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT,
185 H5P_DEFAULT, H5P_DEFAULT);
186
187 H5Dwrite(dataset, H5T_NATIVE_DOUBLE, memspace, filespace, H5P_DEFAULT,
188 step_times.data());
189
190 H5Dclose(dataset);
191 H5Sclose(memspace);
192 H5Sclose(filespace);
193};
194namespace MeshLib::IO
195{
197{
198 hid_t const group;
199 std::map<std::string, hid_t> const datasets;
200 std::vector<HdfData> const variable_attributes;
201};
202
203HdfWriter::HdfWriter(std::vector<MeshHdfData> const& meshes,
204 unsigned long long const initial_step,
205 double const initial_time,
206 std::filesystem::path const& filepath,
207 bool const use_compression,
208 bool const is_file_manager,
209 unsigned int const n_files)
210 : _hdf5_filepath(filepath),
211 _file(createFile(filepath, n_files)),
213 H5Gcreate2(_file, "/meshes", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)),
214 _step_times{initial_time},
215 _use_compression(checkCompression() && use_compression),
216 _is_file_manager(is_file_manager)
217{
218 for (auto const& mesh : meshes)
219 {
220 hid_t const group = H5Gcreate2(_meshes_group, mesh.name.c_str(),
221 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
222
223 auto const createAndWriteDataSet = [&](auto const& attribute) -> hid_t
224 {
225 hid_t const dataset = createDataSet(
226 attribute.data_type, attribute.data_space, attribute.file_space,
227 attribute.chunk_space, _use_compression, group, attribute.name);
228
229 checkHdfStatus(dataset, "Creating HDF5 Dataset: {:s} failed.",
230 attribute.name);
231
232 writeDataSet(attribute.data_start, attribute.data_type,
233 attribute.data_space, attribute.offsets,
234 attribute.file_space, attribute.chunk_space,
235 attribute.name, initial_step, dataset);
236 return dataset;
237 };
238
239 for (auto const& attribute : mesh.constant_attributes)
240 {
241 hid_t const dataset = createAndWriteDataSet(attribute);
242 H5Dclose(dataset);
243 }
244
245 std::map<std::string, hid_t> datasets;
246 for (auto const& attribute : mesh.variable_attributes)
247 {
248 hid_t const dataset = createAndWriteDataSet(attribute);
249 // datasets are kept open
250 datasets.insert({attribute.name, dataset});
251 }
252
253 _hdf_meshes.push_back(std::make_unique<HdfMesh>(
254 HdfMesh{group, datasets, mesh.variable_attributes}));
255 }
256}
257
259{
261
262 for (auto const& mesh : _hdf_meshes)
263 {
264 for (auto const& dataset : mesh->datasets)
265 {
266 if (auto const status = H5Dclose(dataset.second); status < 0)
267 {
268 ERR("Could not close dataset with id '{}' - status is '{}'.",
269 dataset.second, status);
270 }
271 }
272 if (auto const err = H5Gclose(mesh->group); err < 0)
273 {
274 ERR("Could not close group with group id '{}' - status is '{}'.",
275 mesh->group, err);
276 }
277 }
278 if (auto const group_err = H5Gclose(_meshes_group); group_err < 0)
279 {
280 ERR("Could not close group with group id '{}' - status is '{}'.",
281 _meshes_group, group_err);
282 }
283 if (auto const status = H5Fflush(_file, H5F_SCOPE_LOCAL); status < 0)
284 {
285 ERR("Could not flush data to file '{}' - status is '{}'.",
286 _hdf5_filepath.string(), status);
287 }
288 H5Fclose(_file);
289}
290
291void HdfWriter::writeStep(double const time)
292{
293 auto const output_step = _step_times.size();
294 _step_times.push_back(time);
295
296 for (auto const& mesh : _hdf_meshes)
297 {
298 for (auto const& attribute : mesh->variable_attributes)
299 {
300 auto const& dataset_hid = mesh->datasets.find(attribute.name);
301 if (dataset_hid == mesh->datasets.end())
302 {
303 OGS_FATAL("Writing HDF5 Dataset: '{:s}' to file '{}' failed.",
304 attribute.name, _hdf5_filepath.string());
305 }
306
308 attribute.data_start, attribute.data_type, attribute.data_space,
309 attribute.offsets, attribute.file_space, attribute.chunk_space,
310 attribute.name, output_step, mesh->datasets.at(attribute.name));
311 if (auto const flush_status = H5Fflush(_file, H5F_SCOPE_LOCAL);
312 flush_status < 0)
313 {
314 ERR("HdfWriter::writeStep(): Could not flush to file '{}' - "
315 "status is '{}'.",
316 _hdf5_filepath.string(), flush_status);
317 }
318 }
319 }
320}
321} // namespace MeshLib::IO
#define OGS_FATAL(...)
Definition Error.h:19
static void writeDataSet(void const *nodes_data, hid_t const data_type, std::vector< Hdf5DimType > const &data_dims, std::vector< Hdf5DimType > const &offset_dims, std::vector< Hdf5DimType > const &max_dims, std::vector< Hdf5DimType > const &chunk_dims, std::string const &dataset_name, Hdf5DimType const step, hid_t const dataset)
Assumes a dataset is already opened by createDatasetFunction.
static void writeTimeSeries(hid_t const file, std::vector< double > const &step_times, bool const is_file_manager)
Write vector with time values into open hdf file.
void checkHdfStatus(const hid_t status, fmt::format_string< Args... > formatting, Args &&... args)
Definition HdfWriter.cpp:16
static hid_t createDataSet(hid_t const data_type, std::vector< Hdf5DimType > const &data_dims, std::vector< Hdf5DimType > const &max_dims, std::vector< Hdf5DimType > const &chunk_dims, bool const use_compression, hid_t const section, std::string const &dataset_name)
Definition HdfWriter.cpp:57
static bool checkCompression()
Definition HdfWriter.cpp:29
static unsigned short int const default_compression_factor
Definition HdfWriter.cpp:25
static std::vector< Hdf5DimType > prependDimension(Hdf5DimType const prepend_value, std::vector< Hdf5DimType > const &dimensions)
Definition HdfWriter.cpp:49
void ERR(fmt::format_string< Args... > fmt, Args &&... args)
Definition Logging.h:40
void WARN(fmt::format_string< Args... > fmt, Args &&... args)
Definition Logging.h:34
bool const _use_compression
Definition HdfWriter.h:67
std::filesystem::path const _hdf5_filepath
Definition HdfWriter.h:62
std::vector< std::unique_ptr< HdfMesh > > _hdf_meshes
Definition HdfWriter.h:65
bool const _is_file_manager
Definition HdfWriter.h:68
hid_t const _meshes_group
Definition HdfWriter.h:64
void writeStep(double time)
Writes attributes. The data itself is hold by a structure outside of this class. The writer assumes t...
HdfWriter(std::vector< MeshHdfData > const &meshes, unsigned long long initial_step, double initial_time, std::filesystem::path const &filepath, bool use_compression, bool is_file_manager, unsigned int n_files)
Write file with geometry and topology data. The data itself is held by a structure outside of this cl...
std::vector< double > _step_times
Definition HdfWriter.h:66
hsize_t Hdf5DimType
Definition HdfData.h:15
int64_t createHDF5TransferPolicy()
int64_t createFile(std::filesystem::path const &filepath, unsigned int n_files)
std::map< std::string, hid_t > const datasets
std::vector< HdfData > const variable_attributes