8#ifndef GRIDFORMAT_VTK_HDF_UNSTRUCTURED_GRID_READER_HPP_
9#define GRIDFORMAT_VTK_HDF_UNSTRUCTURED_GRID_READER_HPP_
10#if GRIDFORMAT_HAVE_HIGH_FIVE
22#include <gridformat/common/field_transformations.hpp>
23#include <gridformat/common/concepts.hpp>
24#include <gridformat/common/exceptions.hpp>
26#include <gridformat/parallel/concepts.hpp>
// Reader for VTK-HDF files of type "UnstructuredGrid". The Communicator
// template parameter selects sequential (NullCommunicator, the default) vs.
// parallel reading. NOTE(review): this chunk is a mangled extraction — the
// class declaration line itself (original line 36) is missing here.
35template<Concepts::Communicator Communicator = NullCommunicator>
// HDF5 file handle bound to the chosen communicator type.
37 using HDF5File = HDF5::File<Communicator>;
// VTK stores point coordinates with 3 components regardless of grid dimension.
38 static constexpr std::size_t vtk_space_dim = 3;
// With a non-null communicator, each rank reads only its own piece of the file.
39 static constexpr bool read_rank_piece_only = !std::is_same_v<Communicator, NullCommunicator>;
// Returns the reader's display name, with a "(transient)" suffix for
// time-series files. NOTE(review): the guard between the two returns
// (presumably `if (_is_transient())`, original line 52) is missing from this
// extraction — TODO confirm against the upstream file.
51 std::string _name()
const override {
53 return "VTKHDFUnstructuredGridReader (transient)";
54 return "VTKHDFUnstructuredGridReader";
// Body of the file-open routine. NOTE(review): the signature (original lines
// 55-58, presumably `void _open(...) override`) is missing from this
// extraction; `filename` and `field_names` come from those missing parameters.
59 _file = HDF5File{
filename, _comm, HDF5File::read_only};
// Reject files whose VTKHDF "Type" attribute is not "UnstructuredGrid".
60 const auto type = VTKHDF::get_file_type(_file.value());
61 if (type !=
"UnstructuredGrid")
62 throw ValueError(
"Incompatible VTK-HDF type: '" + type +
"', expected 'UnstructuredGrid'.");
64 VTKHDF::check_version_compatibility(_file.value(), {2, 0});
// Transient (time-series) files carry a "VTKHDF/Steps" group; read the step
// count and start at step 0. NOTE(review): a line emplacing _num_steps before
// export_to (original line 68?) appears to be missing — TODO confirm.
66 if (_file->exists(
"VTKHDF/Steps"))
67 _file->visit_attribute(
"VTKHDF/Steps/NSteps", [&] (
auto&& field) {
69 field.export_to(_num_steps.value())
70 _step_index.emplace(0);
73 _compute_piece_offsets();
// Collect the available cell/point/meta-data field names from the
// corresponding groups, if present.
74 const auto copy_names_in = [&] (
const std::string& group,
auto& storage) {
75 if (_file->exists(group))
76 std::ranges::copy(_file->dataset_names_in(group), std::back_inserter(storage));
78 copy_names_in(
"VTKHDF/CellData", field_names.cell_fields);
79 copy_names_in(
"VTKHDF/PointData", field_names.point_fields);
80 copy_names_in(
"VTKHDF/FieldData", field_names.meta_data_fields);
// Closes the reader. NOTE(review): the body (original lines 84+, presumably
// resetting _file and the cached counts/offsets) is missing from this
// extraction — TODO confirm.
83 void _close()
override {
// Computes how many cells/points this process reads and at which offsets
// within the datasets, based on the per-piece entity counts of the current
// step.
93 void _compute_piece_offsets() {
96 _check_communicator_size();
97 const auto num_step_cells = _number_of_all_piece_entities(
"Cells");
98 const auto num_step_points = _number_of_all_piece_entities(
"Points");
// Parallel case: this rank reads only its own piece; offsets accumulate the
// entity counts of all lower ranks.
99 if constexpr (read_rank_piece_only) {
100 const auto my_rank = Parallel::rank(_comm);
101 _num_cells = num_step_cells.at(my_rank);
102 _num_points = num_step_points.at(my_rank);
103 _cell_offset += _accumulate_until(my_rank, num_step_cells);
104 _point_offset += _accumulate_until(my_rank, num_step_points);
// Sequential case: read all pieces at once. NOTE(review): the `} else {`
// separating the branches (original line 105) is missing from this extraction.
106 _num_cells = _accumulate_until(num_step_cells.size(), num_step_cells);
107 _num_points = _accumulate_until(num_step_points.size(), num_step_points);
// Family of step-offset getters: for transient files these read the dataset
// offset of the current step from the "VTKHDF/Steps/*Offsets" bookkeeping
// datasets; NOTE(review): the `: 0` / `: std::size_t{0}` else-branches of the
// ternaries and the Slice `.count` lines are missing from this extraction
// (original lines 115-121, 126-132, ... are absent) — non-transient files
// presumably use offset 0; TODO confirm.
// Offset into the per-piece datasets (NumberOfCells/Points/...) at the
// current step.
111 std::size_t _get_part_offset()
const {
112 return _is_transient()
113 ? _access_file().template read_dataset_to<std::size_t>(
114 "/VTKHDF/Steps/PartOffsets",
116 .offset = {_step_index.value()},
// Offset into the cell datasets (Types/Offsets) at the current step.
122 std::size_t _get_step_cells_offset()
const {
123 return _is_transient()
124 ? _access_file().template read_dataset_to<std::size_t>(
125 "VTKHDF/Steps/CellOffsets",
127 .offset = {_step_index.value(), 0},
// Offset into the Points dataset at the current step.
133 std::size_t _get_step_points_offset()
const {
134 return _is_transient()
135 ? _access_file().template read_dataset_to<std::size_t>(
136 "VTKHDF/Steps/PointOffsets",
138 .offset = {_step_index.value()},
// Offset into the Connectivity dataset at the current step.
144 std::size_t _get_connectivity_id_offset()
const {
145 return _is_transient()
146 ? _access_file().template read_dataset_to<std::size_t>(
147 "VTKHDF/Steps/ConnectivityIdOffsets",
149 .offset = {_step_index.value(), 0},
// Offset into the named cell-data field at the current step.
155 std::size_t _get_cell_data_offset(std::string_view
name)
const {
156 return _is_transient()
157 ? _access_file().template read_dataset_to<std::size_t>(
158 "VTKHDF/Steps/CellDataOffsets/" + std::string{
name},
160 .offset = {_step_index.value()},
// Offset into the named point-data field at the current step.
166 std::size_t _get_point_data_offset(std::string_view
name)
const {
167 return _is_transient()
168 ? _access_file().template read_dataset_to<std::size_t>(
169 "VTKHDF/Steps/PointDataOffsets/" + std::string{
name},
171 .offset = {_step_index.value()},
// Offset into the named field-data (meta-data) array at the current step.
177 std::size_t _get_field_data_offset(std::string_view
name)
const {
178 return _is_transient()
179 ? _access_file().template read_dataset_to<std::size_t>(
180 "VTKHDF/Steps/FieldDataOffsets/" + std::string{
name},
182 .offset = {_step_index.value()},
// A file is a "sequence" (time series) exactly when it is transient.
188 bool _is_sequence()
const override {
189 return _is_transient();
// Number of time steps; throws for non-transient files. NOTE(review): the
// guard before the throw (presumably `if (!_num_steps)`, original line 193)
// is missing from this extraction.
192 std::size_t _number_of_steps()
const override {
194 throw ValueError(
"No step information available");
195 return _num_steps.value();
// Simulation time value stored for the given step index.
198 double _time_at_step(std::size_t step_idx)
const override {
199 if (step_idx >= _number_of_steps())
200 throw ValueError(
"Only " + as_string(_number_of_steps()) +
" available");
201 return _access_file().template read_dataset_to<double>(
"/VTKHDF/Steps/Values", HDF5::Slice{
202 .offset = {step_idx},
// Step-selection body. NOTE(review): the enclosing signature (presumably
// `void _set_step(...) override`, original lines ~205-207) is missing from
// this extraction. Recomputes piece offsets when the step actually changes.
208 if (step_idx >= _number_of_steps())
209 throw ValueError(
"Only " + as_string(_number_of_steps()) +
" available");
210 if (step_idx != _step_index.value()) {
211 _compute_piece_offsets();
212 _step_index = step_idx;
// Count accessors. NOTE(review): the return statements of _number_of_cells /
// _number_of_points (original lines 217-218, 221-222; presumably returning
// _num_cells / _num_points) are missing from this extraction — TODO confirm.
216 std::size_t _number_of_cells()
const override {
220 std::size_t _number_of_points()
const override {
224 std::size_t _number_of_pieces()
const override {
225 return _number_of_current_pieces_in_file();
// Pieces at the current step (transient) or in the whole file (static).
// NOTE(review): the branch guard between the two returns (presumably
// `if (_is_transient())`, original line 229) is missing from this extraction.
228 std::size_t _number_of_current_pieces_in_file()
const {
230 return _number_of_pieces_in_file_at_step(_step_index.value());
231 return _total_number_of_pieces();
// Total number of pieces across the whole file, derived from the length of
// the 1-d '/VTKHDF/NumberOfCells' dataset (one entry per piece per step).
234 std::size_t _total_number_of_pieces()
const {
235 auto ncells = _access_file().get_dimensions(
"/VTKHDF/NumberOfCells");
236 if (!ncells)
throw IOError(
"Missing dataset at '/VTKHDF/NumberOfCells'");
237 if (ncells->size() != 1)
throw IOError(
"Unexpected dimension of '/VTKHDF/NumberOfCells'");
238 return ncells->at(0);
// Number of pieces written at a particular step of a transient file. Prefers
// the explicit 'NumberOfParts' dataset; otherwise deduces a uniform piece
// count per step.
241 std::size_t _number_of_pieces_in_file_at_step(std::size_t step)
const {
242 if (!_is_transient())
243 throw InvalidState(
"Step data only available in transient files");
244 if (
const auto& file = _access_file(); file.exists(
"/VTKHDF/Steps/NumberOfParts"))
245 return file.template read_dataset_to<std::vector<std::size_t>>(
"/VTKHDF/Steps/NumberOfParts").at(step);
// Fallback: total pieces must divide evenly among the steps. NOTE(review):
// the throw statement after this check (original line 249) is missing from
// this extraction; the message strings below belong to it.
248 if (_total_number_of_pieces()%_num_steps.value() != 0)
250 "Cannot deduce the number of pieces. "
251 "The dataset '/VTKHDF/Steps/NumberOfParts' is not available, "
252 "but the total number of pieces is not divisble by the number of steps"
254 return _total_number_of_pieces()/_num_steps.value();
// Parallel reads require the communicator size to match the number of pieces
// in the file (one piece per rank). NOTE(review): the statement introducing
// the error (presumably `throw SizeError(`, original line 260) is missing
// from this extraction; the message strings below belong to it.
257 void _check_communicator_size()
const {
258 if constexpr (!std::is_same_v<Communicator, NullCommunicator>)
259 if (_number_of_current_pieces_in_file() !=
static_cast<std::size_t
>(Parallel::size(_comm)))
261 "Can only read the file in parallel if the size of the communicator matches the size "
262 "of that used when writing the file. Please read in the file sequentially on one process "
263 "and distribute the grid yourself, or restart the parallel run with "
264 + std::to_string(_number_of_current_pieces_in_file()) +
" processes."
// Visits all cells: in parallel mode only this rank's piece (with no point
// offset); sequentially, every piece in turn, shifting corner indices by the
// accumulated point count of the preceding pieces.
268 void _visit_cells(
const typename GridReader::CellVisitor& visitor)
const override {
269 if constexpr (read_rank_piece_only)
270 _visit_cells_for_rank(Parallel::rank(_comm), 0, visitor);
// NOTE(review): the `else` between the branches (original line 271) is
// missing from this extraction.
272 std::ranges::for_each(
273 std::views::iota(std::size_t{0}, _number_of_current_pieces_in_file()),
274 [&] (
int rank)
mutable {
// Points of earlier pieces precede this piece's points in the dataset.
275 const auto [_, base_offset] = _number_of_entities_and_offset_at_rank(rank,
"Points");
276 _visit_cells_for_rank(rank, base_offset, visitor);
// Reads the cell types, offsets and connectivity of one piece and invokes the
// visitor once per cell with its (globally shifted) corner indices.
281 void _visit_cells_for_rank(
int piece_rank,
282 std::size_t point_base_offset,
283 const typename GridReader::CellVisitor& visitor)
const {
284 const auto& file = _access_file();
285 const auto [my_num_cells, my_cell_offset] = _number_of_entities_and_offset_at_rank(piece_rank,
"Cells");
// Per-piece connectivity-id counts for the current step.
286 const auto num_connectivity_ids = file.template read_dataset_to<std::vector<std::size_t>>(
287 "VTKHDF/NumberOfConnectivityIds",
289 .offset = {_get_part_offset()},
290 .count = {_number_of_current_pieces_in_file()}
293 const auto my_num_connectivity_ids = num_connectivity_ids.at(piece_rank);
// Sum of the connectivity-id counts of all lower-rank pieces.
294 const auto my_id_offset = std::accumulate(
295 num_connectivity_ids.begin(),
296 num_connectivity_ids.begin() + piece_rank,
// The Offsets dataset holds one extra leading entry per piece, hence the
// `+ piece_rank` shift and `my_num_cells + 1` count.
300 auto offsets = file.template read_dataset_to<std::vector<std::size_t>>(
"VTKHDF/Offsets", {{
303 _get_step_cells_offset() + _accumulate_over_pieces_until_step(_step_index.value_or(0), 1)
304 + my_cell_offset + piece_rank
306 .count = {my_num_cells + 1}
// NOTE(review): the dataset path for `types` (original line 309, presumably
// "VTKHDF/Types") is missing from this extraction — TODO confirm.
308 auto types = file.template read_dataset_to<std::vector<std::uint_least8_t>>(
310 {{.offset = {_get_step_cells_offset() + my_cell_offset}, .count = {my_num_cells}}}
312 auto connectivity = file.template read_dataset_to<std::vector<std::size_t>>(
313 "VTKHDF/Connectivity",
314 {{.offset = {_get_connectivity_id_offset() + my_id_offset}, .count = {my_num_connectivity_ids}}}
// Corner index buffer, reused across cells to avoid per-cell allocations.
317 std::vector<std::size_t> corners;
318 for (std::size_t i = 0; i < types.size(); ++i) {
319 assert(i < offsets.size() - 1);
320 assert(offsets.at(i+1) > offsets.at(i));
321 const auto p0_offset = offsets.at(i);
322 const auto num_corners = offsets.at(i+1) - p0_offset;
324 corners.resize(num_corners);
325 for (std::size_t c = 0; c < num_corners; ++c)
326 corners[c] = connectivity.at(p0_offset + c) + point_base_offset;
327 visitor(VTK::cell_type(types.at(i)), corners);
// Point-coordinates field body. NOTE(review): the enclosing signature
// (presumably `FieldPtr _points() const override {`, original line ~331) and
// the field-construction wrapper around the arguments below (original lines
// 333-334) are missing from this extraction — TODO confirm. Reads _num_points
// rows of 3 coordinates starting at this piece's/step's offset.
332 const std::string path =
"VTKHDF/Points";
335 MDLayout{{_num_points, vtk_space_dim}},
336 _file.value().get_precision(path).value(),
337 _serialization_callback(path, HDF5::Slice{
338 .offset = {_point_offset + _get_step_points_offset(), 0},
339 .count = {_num_points, vtk_space_dim}
// Returns a lazily-read field over this piece's slice of the named cell-data
// dataset. NOTE(review): the field-construction wrapper around the three
// arguments (original lines 347-348) is missing from this extraction.
344 FieldPtr _cell_field(std::string_view
name)
const override {
345 const std::string path =
"VTKHDF/CellData/" + std::string{
name};
346 const auto slice = _get_slice(path, _num_cells, _cell_offset + _get_cell_data_offset(
name));
349 MDLayout{slice.count},
350 _file.value().get_precision(path).value(),
351 _serialization_callback(path, slice)
// Same as _cell_field, but for point data (sized/offset by points).
355 FieldPtr _point_field(std::string_view
name)
const override {
356 const std::string path =
"VTKHDF/PointData/" + std::string{
name};
357 const auto slice = _get_slice(path, _num_points, _point_offset + _get_point_data_offset(
name));
360 MDLayout{slice.count},
361 _file.value().get_precision(path).value(),
362 _serialization_callback(path, slice)
// Returns a field-data (meta-data) array. For transient files, field data is
// stored with a leading step dimension which is selected via the step offset
// and flattened away before serialization.
366 FieldPtr _meta_data_field(std::string_view
name)
const override {
367 const auto path =
"VTKHDF/FieldData/" + std::string{
name};
368 const auto dims = _file.value().get_dimensions(path).value();
// NOTE(review): the handling of the 1-d case (original line 370) and the
// declarations of `offset`/`count` (original lines ~373-375) are missing
// from this extraction — TODO confirm against the upstream file.
369 if (dims.size() == 1)
371 if (dims.size() != 2)
372 throw SizeError(
"Unexpected field data array size");
376 std::ranges::fill(offset, 0);
// Select the row belonging to the current step.
377 offset.at(0) = _get_field_data_offset(
name);
// Drop the step dimension from the exposed layout.
380 MDLayout{count | std::views::drop(1)},
381 _file.value().get_precision(path).value(),
// Capture path/offset/count by value: the callback may outlive this call.
382 [p=path, o=offset, c=count] (
const HDF5File& f) {
383 return f.visit_dataset(p, [&] <
typename F> (F&& field) {
// Flatten the (1, n, ...) slice into a plain array before serializing.
384 return FlattenedField{
make_field_ptr(std::move(field))}.serialized();
385 }, HDF5::Slice{.offset = o, .count = c});
// Reads a single scalar of type T for this rank from the dataset at `path`,
// offset by base_offset + rank. NOTE(review): several lines are missing from
// this extraction (original 392: presumably `T result;`; 394: the path
// argument; 396-403: slice count, closing braces and `return result;`) —
// TODO confirm.
390 template<Concepts::Scalar T>
391 T _read_rank_scalar(
const std::string& path,
const std::size_t base_offset = 0)
const {
393 _file.value().visit_dataset(
395 [&] (
auto&& field) { field.export_to(result); },
397 .offset = {base_offset +
static_cast<std::size_t
>(Parallel::rank(_comm))},
// Builds a hyperslab selection over `path`: full extent in all trailing
// dimensions, `count` rows starting at `offset` in the first. NOTE(review):
// the `count` parameter declaration (original line 405, presumably
// `std::size_t count,`) is missing from this extraction.
404 HDF5::Slice _get_slice(
const std::string& path,
406 std::size_t offset)
const {
407 auto ds_size = _file.value().get_dimensions(path).value();
408 auto ds_offset = ds_size;
409 std::ranges::fill(ds_offset, std::size_t{0});
410 ds_size.at(0) = count;
411 ds_offset.at(0) = offset;
412 return {.offset = ds_offset, .count = ds_size};
// Returns a callback that serializes the dataset at `path`, optionally
// restricted to `slice`. Captures by value so the callback can outlive the
// reader call site. NOTE(review): the branch selecting between the two
// visit_dataset calls (presumably `if (_s.has_value())`, original line 417)
// is missing from this extraction.
415 auto _serialization_callback(std::string path, std::optional<HDF5::Slice> slice = {})
const {
416 return [_p=std::move(path), _s=std::move(slice)] (
const HDF5File& file) {
418 return file.visit_dataset(_p, [&] <
typename F> (F&& f) {
return f.serialized(); }, _s.value());
419 return file.visit_dataset(_p, [&] <
typename F> (F&& f) {
return f.serialized(); });
// A file is transient iff the step count was found during _open.
423 bool _is_transient()
const {
424 return static_cast<bool>(_num_steps);
// Access the open file handle; throws if no file has been opened yet.
427 const HDF5File& _access_file()
const {
428 if (!_file.has_value())
429 throw InvalidState(
"No file has been read");
430 return _file.value();
// Reads the per-piece entity counts ("Cells" or "Points") for the current
// step from '/VTKHDF/NumberOf<entity_type>'.
433 std::vector<std::size_t> _number_of_all_piece_entities(
const std::string& entity_type)
const {
434 return _access_file().template read_dataset_to<std::vector<std::size_t>>(
"/VTKHDF/NumberOf" + entity_type, {{
435 .offset = {_get_part_offset()},
436 .count = {_number_of_current_pieces_in_file()}
// Returns {count at rank, sum of counts of all lower ranks} for the given
// entity type.
440 template<std::
integral T>
441 std::array<std::size_t, 2> _number_of_entities_and_offset_at_rank(T rank,
const std::string& entity_type)
const {
442 const auto entities = _number_of_all_piece_entities(entity_type);
443 return std::array{entities.at(rank), _accumulate_until(rank, entities)};
// Partial sum of `values` up to (excluding) index `rank`. NOTE(review): the
// enclosing template declaration (presumably `template<typename T>`, original
// line ~446) is missing from this extraction.
447 T _accumulate_until(std::integral
auto rank,
const std::vector<T>& values)
const {
448 return std::accumulate(values.begin(), values.begin() + rank, T{0});
// Sums `count_per_piece` over all pieces of all steps before `step`; used to
// skip the per-piece extra entries of earlier steps in bookkeeping datasets.
451 std::size_t _accumulate_over_pieces_until_step(std::size_t step, std::size_t count_per_piece)
const {
454 if (!_is_transient())
455 throw InvalidState(
"Step data only available in transient files");
457 std::size_t result = 0;
458 std::ranges::for_each(std::views::iota(std::size_t{0}, step), [&] (std::size_t step_idx) {
459 const auto num_step_pieces = _number_of_pieces_in_file_at_step(step_idx);
460 result += count_per_piece*num_step_pieces;
// Data members. NOTE(review): the communicator member (presumably
// `Communicator _comm;`, original line ~465) is missing from this extraction.
// Open file handle; empty until _open succeeds.
466 std::optional<HDF5File> _file;
// Cells/points this process reads (piece-local in parallel mode).
467 std::size_t _num_cells = 0;
468 std::size_t _num_points = 0;
// Dataset offsets of this process's piece within the current step.
469 std::size_t _cell_offset = 0;
470 std::size_t _point_offset = 0;
// Set only for transient files (presence doubles as the transient flag).
471 std::optional<std::size_t> _num_steps;
// Currently selected step; engaged only for transient files.
472 std::optional<std::size_t> _step_index;
Base class for grid data readers.
std::shared_ptr< const Field > FieldPtr
Pointer type used by writers/readers for fields.
Definition: field.hpp:186
FieldPtr make_field_ptr(F &&f)
Factory function for field pointers.
Definition: field.hpp:192
Common functionality for writing VTK HDF files.