GridFormat 0.2.1
I/O-Library for grid-like data structures
Loading...
Searching...
No Matches
hdf_unstructured_grid_reader.hpp
Go to the documentation of this file.
1// SPDX-FileCopyrightText: 2022-2023 Dennis Gläser <dennis.glaeser@iws.uni-stuttgart.de>
2// SPDX-License-Identifier: MIT
8#ifndef GRIDFORMAT_VTK_HDF_UNSTRUCTURED_GRID_READER_HPP_
9#define GRIDFORMAT_VTK_HDF_UNSTRUCTURED_GRID_READER_HPP_
10#if GRIDFORMAT_HAVE_HIGH_FIVE
11
12#include <iterator>
13#include <optional>
14#include <algorithm>
15#include <type_traits>
16#include <concepts>
17#include <numeric>
18#include <utility>
19#include <cassert>
20
22#include <gridformat/common/field_transformations.hpp>
23#include <gridformat/common/concepts.hpp>
24#include <gridformat/common/exceptions.hpp>
26#include <gridformat/parallel/concepts.hpp>
28
29namespace GridFormat {
30
35template<Concepts::Communicator Communicator = NullCommunicator>
    using HDF5File = HDF5::File<Communicator>;
    // VTK-HDF always stores point coordinates with three components,
    // regardless of the grid's actual dimension.
    static constexpr std::size_t vtk_space_dim = 3;
    // With a real communicator each rank reads only its own piece;
    // with the NullCommunicator the whole file is read sequentially.
    static constexpr bool read_rank_piece_only = !std::is_same_v<Communicator, NullCommunicator>;
 public:
    // Sequential construction; only available when no communicator type was chosen.
    explicit VTKHDFUnstructuredGridReader() requires(std::is_same_v<Communicator, NullCommunicator>)
    : _comm{}
    {}

    // Parallel construction with the communicator used when the file was written.
    explicit VTKHDFUnstructuredGridReader(Communicator comm)
    : _comm{comm}
    {}
50 private:
51 std::string _name() const override {
52 if (_is_transient())
53 return "VTKHDFUnstructuredGridReader (transient)";
54 return "VTKHDFUnstructuredGridReader";
55 }
56
    // Open `filename`, validate that it is a VTK-HDF "UnstructuredGrid" file
    // of a compatible version, detect transient (stepped) data, compute this
    // rank's piece sizes/offsets, and collect the available field names.
    void _open(const std::string& filename, typename GridReader::FieldNames& field_names) override {
        _close();
        _file = HDF5File{filename, _comm, HDF5File::read_only};
        const auto type = VTKHDF::get_file_type(_file.value());
        if (type != "UnstructuredGrid")
            throw ValueError("Incompatible VTK-HDF type: '" + type + "', expected 'UnstructuredGrid'.");

        VTKHDF::check_version_compatibility(_file.value(), {2, 0});

        // The presence of "VTKHDF/Steps" marks a transient file; read the
        // number of steps and select the first step by default.
        if (_file->exists("VTKHDF/Steps"))
            _file->visit_attribute("VTKHDF/Steps/NSteps", [&] (auto&& field) {
                _num_steps.emplace();
                field.export_to(_num_steps.value());
                _step_index.emplace(0);
            });

        _compute_piece_offsets();
        // Field names are the dataset names within the respective data groups;
        // any of the groups may be absent.
        const auto copy_names_in = [&] (const std::string& group, auto& storage) {
            if (_file->exists(group))
                std::ranges::copy(_file->dataset_names_in(group), std::back_inserter(storage));
        };
        copy_names_in("VTKHDF/CellData", field_names.cell_fields);
        copy_names_in("VTKHDF/PointData", field_names.point_fields);
        copy_names_in("VTKHDF/FieldData", field_names.meta_data_fields);
    }
82
83 void _close() override {
84 _file = {};
85 _num_cells = 0;
86 _num_points = 0;
87 _cell_offset = 0;
88 _point_offset = 0;
89 _num_steps = {};
90 _step_index = {};
91 }
92
    // Determine how many cells/points this process reads and at which offsets
    // within the current step's datasets. In parallel each rank reads exactly
    // one piece; sequentially all pieces are read as a single grid.
    void _compute_piece_offsets() {
        _cell_offset = 0;
        _point_offset = 0;
        _check_communicator_size();
        const auto num_step_cells = _number_of_all_piece_entities("Cells");
        const auto num_step_points = _number_of_all_piece_entities("Points");
        if constexpr (read_rank_piece_only) {
            const auto my_rank = Parallel::rank(_comm);
            _num_cells = num_step_cells.at(my_rank);
            _num_points = num_step_points.at(my_rank);
            // offset = sum of the entity counts of all lower ranks
            _cell_offset += _accumulate_until(my_rank, num_step_cells);
            _point_offset += _accumulate_until(my_rank, num_step_points);
        } else {
            // sequential: total over all pieces of the step
            _num_cells = _accumulate_until(num_step_cells.size(), num_step_cells);
            _num_points = _accumulate_until(num_step_points.size(), num_step_points);
        }
    }
110
111 std::size_t _get_part_offset() const {
112 return _is_transient()
113 ? _access_file().template read_dataset_to<std::size_t>(
114 "/VTKHDF/Steps/PartOffsets",
115 HDF5::Slice{
116 .offset = {_step_index.value()},
117 .count = {1}
118 })
119 : std::size_t{0};
120 }
121
122 std::size_t _get_step_cells_offset() const {
123 return _is_transient()
124 ? _access_file().template read_dataset_to<std::size_t>(
125 "VTKHDF/Steps/CellOffsets",
126 HDF5::Slice{
127 .offset = {_step_index.value(), 0},
128 .count = {1, 1}
129 })
130 : std::size_t{0};
131 }
132
133 std::size_t _get_step_points_offset() const {
134 return _is_transient()
135 ? _access_file().template read_dataset_to<std::size_t>(
136 "VTKHDF/Steps/PointOffsets",
137 HDF5::Slice{
138 .offset = {_step_index.value()},
139 .count = {1}
140 })
141 : std::size_t{0};
142 }
143
144 std::size_t _get_connectivity_id_offset() const {
145 return _is_transient()
146 ? _access_file().template read_dataset_to<std::size_t>(
147 "VTKHDF/Steps/ConnectivityIdOffsets",
148 HDF5::Slice{
149 .offset = {_step_index.value(), 0},
150 .count = {1, 1}
151 })
152 : std::size_t{0};
153 }
154
155 std::size_t _get_cell_data_offset(std::string_view name) const {
156 return _is_transient()
157 ? _access_file().template read_dataset_to<std::size_t>(
158 "VTKHDF/Steps/CellDataOffsets/" + std::string{name},
159 HDF5::Slice{
160 .offset = {_step_index.value()},
161 .count = {1}
162 })
163 : std::size_t{0};
164 }
165
166 std::size_t _get_point_data_offset(std::string_view name) const {
167 return _is_transient()
168 ? _access_file().template read_dataset_to<std::size_t>(
169 "VTKHDF/Steps/PointDataOffsets/" + std::string{name},
170 HDF5::Slice{
171 .offset = {_step_index.value()},
172 .count = {1}
173 })
174 : std::size_t{0};
175 }
176
177 std::size_t _get_field_data_offset(std::string_view name) const {
178 return _is_transient()
179 ? _access_file().template read_dataset_to<std::size_t>(
180 "VTKHDF/Steps/FieldDataOffsets/" + std::string{name},
181 HDF5::Slice{
182 .offset = {_step_index.value()},
183 .count = {1}
184 })
185 : std::size_t{0};
186 }
187
188 bool _is_sequence() const override {
189 return _is_transient();
190 }
191
192 std::size_t _number_of_steps() const override {
193 if (!_num_steps)
194 throw ValueError("No step information available");
195 return _num_steps.value();
196 }
197
198 double _time_at_step(std::size_t step_idx) const override {
199 if (step_idx >= _number_of_steps())
200 throw ValueError("Only " + as_string(_number_of_steps()) + " available");
201 return _access_file().template read_dataset_to<double>("/VTKHDF/Steps/Values", HDF5::Slice{
202 .offset = {step_idx},
203 .count = {1}
204 });
205 }
206
207 void _set_step(std::size_t step_idx, typename GridReader::FieldNames&) override {
208 if (step_idx >= _number_of_steps())
209 throw ValueError("Only " + as_string(_number_of_steps()) + " available");
210 if (step_idx != _step_index.value()) {
211 _compute_piece_offsets();
212 _step_index = step_idx;
213 }
214 }
215
    // Number of cells this process reads (piece size in parallel, total otherwise).
    std::size_t _number_of_cells() const override {
        return _num_cells;
    }
219
    // Number of points this process reads (piece size in parallel, total otherwise).
    std::size_t _number_of_points() const override {
        return _num_points;
    }
223
    // Number of pieces stored in the file for the currently selected step.
    std::size_t _number_of_pieces() const override {
        return _number_of_current_pieces_in_file();
    }
227
228 std::size_t _number_of_current_pieces_in_file() const {
229 if (_is_transient())
230 return _number_of_pieces_in_file_at_step(_step_index.value());
231 return _total_number_of_pieces();
232 }
233
234 std::size_t _total_number_of_pieces() const {
235 auto ncells = _access_file().get_dimensions("/VTKHDF/NumberOfCells");
236 if (!ncells) throw IOError("Missing dataset at '/VTKHDF/NumberOfCells'");
237 if (ncells->size() != 1) throw IOError("Unexpected dimension of '/VTKHDF/NumberOfCells'");
238 return ncells->at(0);
239 }
240
241 std::size_t _number_of_pieces_in_file_at_step(std::size_t step) const {
242 if (!_is_transient())
243 throw InvalidState("Step data only available in transient files");
244 if (const auto& file = _access_file(); file.exists("/VTKHDF/Steps/NumberOfParts"))
245 return file.template read_dataset_to<std::vector<std::size_t>>("/VTKHDF/Steps/NumberOfParts").at(step);
246
247 // all steps have the same number of parts
248 if (_total_number_of_pieces()%_num_steps.value() != 0)
249 throw IOError(
250 "Cannot deduce the number of pieces. "
251 "The dataset '/VTKHDF/Steps/NumberOfParts' is not available, "
252 "but the total number of pieces is not divisble by the number of steps"
253 );
254 return _total_number_of_pieces()/_num_steps.value();
255 }
256
257 void _check_communicator_size() const {
258 if constexpr (!std::is_same_v<Communicator, NullCommunicator>)
259 if (_number_of_current_pieces_in_file() != static_cast<std::size_t>(Parallel::size(_comm)))
260 throw SizeError(
261 "Can only read the file in parallel if the size of the communicator matches the size "
262 "of that used when writing the file. Please read in the file sequentially on one process "
263 "and distribute the grid yourself, or restart the parallel run with "
264 + std::to_string(_number_of_current_pieces_in_file()) + " processes."
265 );
266 }
267
268 void _visit_cells(const typename GridReader::CellVisitor& visitor) const override {
269 if constexpr (read_rank_piece_only)
270 _visit_cells_for_rank(Parallel::rank(_comm), 0, visitor);
271 else
272 std::ranges::for_each(
273 std::views::iota(std::size_t{0}, _number_of_current_pieces_in_file()),
274 [&] (int rank) mutable {
275 const auto [_, base_offset] = _number_of_entities_and_offset_at_rank(rank, "Points");
276 _visit_cells_for_rank(rank, base_offset, visitor);
277 }
278 );
279 }
280
    // Read the cell types, offsets and connectivity of the piece written by
    // `piece_rank` and invoke `visitor` with (cell type, corner indices) for
    // each cell. `point_base_offset` is added to every connectivity entry so
    // that sequentially merged pieces index into the global point range.
    void _visit_cells_for_rank(int piece_rank,
                               std::size_t point_base_offset,
                               const typename GridReader::CellVisitor& visitor) const {
        const auto& file = _access_file();
        const auto [my_num_cells, my_cell_offset] = _number_of_entities_and_offset_at_rank(piece_rank, "Cells");
        // connectivity id counts of all pieces of the current step
        const auto num_connectivity_ids = file.template read_dataset_to<std::vector<std::size_t>>(
            "VTKHDF/NumberOfConnectivityIds",
            HDF5::Slice{
                .offset = {_get_part_offset()},
                .count = {_number_of_current_pieces_in_file()}
            }
        );
        const auto my_num_connectivity_ids = num_connectivity_ids.at(piece_rank);
        // ids of lower-rank pieces precede ours within the step
        const auto my_id_offset = std::accumulate(
            num_connectivity_ids.begin(),
            num_connectivity_ids.begin() + piece_rank,
            std::size_t{0}
        );

        auto offsets = file.template read_dataset_to<std::vector<std::size_t>>("VTKHDF/Offsets", {{
            // offsets have length num_cells+1, so we need to add +1 per rank to the cell offsets
            .offset = {
                _get_step_cells_offset() + _accumulate_over_pieces_until_step(_step_index.value_or(0), 1) // offset from step
                + my_cell_offset + piece_rank // offset from this piece in the pieces of the current step
            },
            .count = {my_num_cells + 1}
        }});
        auto types = file.template read_dataset_to<std::vector<std::uint_least8_t>>(
            "VTKHDF/Types",
            {{.offset = {_get_step_cells_offset() + my_cell_offset}, .count = {my_num_cells}}}
        );
        auto connectivity = file.template read_dataset_to<std::vector<std::size_t>>(
            "VTKHDF/Connectivity",
            {{.offset = {_get_connectivity_id_offset() + my_id_offset}, .count = {my_num_connectivity_ids}}}
        );

        // offsets[i] / offsets[i+1] delimit cell i's corners in `connectivity`
        std::vector<std::size_t> corners;
        for (std::size_t i = 0; i < types.size(); ++i) {
            assert(i < offsets.size() - 1);
            assert(offsets.at(i+1) > offsets.at(i));
            const auto p0_offset = offsets.at(i);
            const auto num_corners = offsets.at(i+1) - p0_offset;

            corners.resize(num_corners);
            for (std::size_t c = 0; c < num_corners; ++c)
                corners[c] = connectivity.at(p0_offset + c) + point_base_offset;
            visitor(VTK::cell_type(types.at(i)), corners);
        }
    }
330
    // Lazy field over this piece's point coordinates (always 3 components in VTK).
    // NOTE(review): the line constructing the returned field (presumably
    // `return make_field_ptr(VTKHDF::DataSetField{`) is missing from this
    // extraction — verify against the original header before editing.
    FieldPtr _points() const override {
        const std::string path = "VTKHDF/Points";
            _file.value(),
            MDLayout{{_num_points, vtk_space_dim}},
            _file.value().get_precision(path).value(),
            // slice: this piece's points of the current step
            _serialization_callback(path, HDF5::Slice{
                .offset = {_point_offset + _get_step_points_offset(), 0},
                .count = {_num_points, vtk_space_dim}
            })
        });
    }
343
    // Lazy field over this piece's slice of the cell data array `name`.
    // NOTE(review): the line constructing the returned field (presumably
    // `return make_field_ptr(VTKHDF::DataSetField{`) is missing from this
    // extraction — verify against the original header before editing.
    FieldPtr _cell_field(std::string_view name) const override {
        const std::string path = "VTKHDF/CellData/" + std::string{name};
        const auto slice = _get_slice(path, _num_cells, _cell_offset + _get_cell_data_offset(name));
            _file.value(),
            MDLayout{slice.count},
            _file.value().get_precision(path).value(),
            _serialization_callback(path, slice)
        });
    }
354
    // Lazy field over this piece's slice of the point data array `name`.
    // NOTE(review): the line constructing the returned field (presumably
    // `return make_field_ptr(VTKHDF::DataSetField{`) is missing from this
    // extraction — verify against the original header before editing.
    FieldPtr _point_field(std::string_view name) const override {
        const std::string path = "VTKHDF/PointData/" + std::string{name};
        const auto slice = _get_slice(path, _num_points, _point_offset + _get_point_data_offset(name));
            _file.value(),
            MDLayout{slice.count},
            _file.value().get_precision(path).value(),
            _serialization_callback(path, slice)
        });
    }
365
    // Field over the metadata (field data) array `name`. 1d arrays are
    // returned as-is; 2d arrays are sliced to the current step's row and
    // flattened. Other ranks are rejected.
    // NOTE(review): the line constructing the returned field for the 2d case
    // (presumably `return make_field_ptr(VTKHDF::DataSetField{`) is missing
    // from this extraction — verify against the original header.
    FieldPtr _meta_data_field(std::string_view name) const override {
        const auto path = "VTKHDF/FieldData/" + std::string{name};
        const auto dims = _file.value().get_dimensions(path).value();
        if (dims.size() == 1)
            return make_field_ptr(VTKHDF::DataSetField{_file.value(), path});
        if (dims.size() != 2)
            throw SizeError("Unexpected field data array size");
        // select one row (the current step's) over the full remaining extent
        auto offset = dims;
        auto count = offset;
        count.at(0) = 1;
        std::ranges::fill(offset, 0);
        offset.at(0) = _get_field_data_offset(name);
            _file.value(),
            MDLayout{count | std::views::drop(1)},
            _file.value().get_precision(path).value(),
            [p=path, o=offset, c=count] (const HDF5File& f) {
                return f.visit_dataset(p, [&] <typename F> (F&& field) {
                    // drop the leading step axis by flattening the row
                    return FlattenedField{make_field_ptr(std::move(field))}.serialized();
                }, HDF5::Slice{.offset = o, .count = c});
            }
        });
    }
389
390 template<Concepts::Scalar T>
391 T _read_rank_scalar(const std::string& path, const std::size_t base_offset = 0) const {
392 T result;
393 _file.value().visit_dataset(
394 path,
395 [&] (auto&& field) { field.export_to(result); },
396 HDF5::Slice{
397 .offset = {base_offset + static_cast<std::size_t>(Parallel::rank(_comm))},
398 .count = {1}
399 }
400 );
401 return result;
402 }
403
404 HDF5::Slice _get_slice(const std::string& path,
405 std::size_t count,
406 std::size_t offset) const {
407 auto ds_size = _file.value().get_dimensions(path).value();
408 auto ds_offset = ds_size;
409 std::ranges::fill(ds_offset, std::size_t{0});
410 ds_size.at(0) = count;
411 ds_offset.at(0) = offset;
412 return {.offset = ds_offset, .count = ds_size};
413 }
414
415 auto _serialization_callback(std::string path, std::optional<HDF5::Slice> slice = {}) const {
416 return [_p=std::move(path), _s=std::move(slice)] (const HDF5File& file) {
417 if (_s)
418 return file.visit_dataset(_p, [&] <typename F> (F&& f) { return f.serialized(); }, _s.value());
419 return file.visit_dataset(_p, [&] <typename F> (F&& f) { return f.serialized(); });
420 };
421 }
422
    // True iff step information was found upon opening (see _open()).
    bool _is_transient() const {
        return static_cast<bool>(_num_steps);
    }
426
427 const HDF5File& _access_file() const {
428 if (!_file.has_value())
429 throw InvalidState("No file has been read");
430 return _file.value();
431 }
432
433 std::vector<std::size_t> _number_of_all_piece_entities(const std::string& entity_type) const {
434 return _access_file().template read_dataset_to<std::vector<std::size_t>>("/VTKHDF/NumberOf" + entity_type, {{
435 .offset = {_get_part_offset()},
436 .count = {_number_of_current_pieces_in_file()}
437 }});
438 }
439
440 template<std::integral T>
441 std::array<std::size_t, 2> _number_of_entities_and_offset_at_rank(T rank, const std::string& entity_type) const {
442 const auto entities = _number_of_all_piece_entities(entity_type);
443 return std::array{entities.at(rank), _accumulate_until(rank, entities)};
444 }
445
446 template<typename T>
447 T _accumulate_until(std::integral auto rank, const std::vector<T>& values) const {
448 return std::accumulate(values.begin(), values.begin() + rank, T{0});
449 }
450
451 std::size_t _accumulate_over_pieces_until_step(std::size_t step, std::size_t count_per_piece) const {
452 if (step == 0)
453 return 0;
454 if (!_is_transient())
455 throw InvalidState("Step data only available in transient files");
456
457 std::size_t result = 0;
458 std::ranges::for_each(std::views::iota(std::size_t{0}, step), [&] (std::size_t step_idx) {
459 const auto num_step_pieces = _number_of_pieces_in_file_at_step(step_idx);
460 result += count_per_piece*num_step_pieces;
461 });
462 return result;
463 }
464
    Communicator _comm;                      // communicator for parallel I/O (NullCommunicator => sequential)
    std::optional<HDF5File> _file;           // open file handle; empty until _open() succeeds
    std::size_t _num_cells = 0;              // cells this process reads for the current step
    std::size_t _num_points = 0;             // points this process reads for the current step
    std::size_t _cell_offset = 0;            // offset of this process' cells within the step's datasets
    std::size_t _point_offset = 0;           // offset of this process' points within the step's datasets
    std::optional<std::size_t> _num_steps;   // number of steps; set only for transient files
    std::optional<std::size_t> _step_index;  // currently selected step; set only for transient files
473};
474
475} // namespace GridFormat
476
477#endif // GRIDFORMAT_HAVE_HIGH_FIVE
478#endif // GRIDFORMAT_VTK_HDF_UNSTRUCTURED_GRID_READER_HPP_
Abstract base class for all readers, defines the common interface.
Definition: reader.hpp:51
const std::string & filename() const
Return the name of the opened grid file (empty string until open() is called)
Definition: reader.hpp:92
std::string name() const
Return the name of this reader.
Definition: reader.hpp:73
Reader for the VTK-HDF file format for unstructured grids.
Definition: hdf_unstructured_grid_reader.hpp:36
Field implementation that draws values from an open HDF5 file upon request.
Definition: hdf_common.hpp:104
Base class for grid data readers.
std::shared_ptr< const Field > FieldPtr
Pointer type used by writers/readers for fields.
Definition: field.hpp:186
FieldPtr make_field_ptr(F &&f)
Factory function for field pointers.
Definition: field.hpp:192
Common functionality for writing VTK HDF files.
Definition: reader.hpp:266