Skip to content
Snippets Groups Projects
Commit 5225ca23 authored by Elias Pipping's avatar Elias Pipping
Browse files

Kill file again that was revived by accident

parent 822ed5d4
Branches releases/2.4-2
No related tags found
No related merge requests found
Pipeline #
#ifndef DUNE_FUFEM_HDF5_HDF5_SEQUENCE_IO_HH
#define DUNE_FUFEM_HDF5_HDF5_SEQUENCE_IO_HH
#include <algorithm>
#include <array>
#include <cassert>
#include <functional>
#include <numeric>
#include <vector>

#include <hdf5.h>

#include <dune/common/exceptions.hh>
#include <dune/common/fvector.hh>
#include <dune/istl/bvector.hh>
#include <dune/istl/matrix.hh>

#include "hdf5file.hh"
#include "hdf5typetraits.hh"
#include "frombuffer.hh"
#include "tobuffer.hh"
/// \brief Reads and writes a time series of fixed-shape entries to a single
///        chunked, extensible HDF5 dataset.
///
/// The dataset has rank 1 + spatialDimensions: an unlimited leading "time"
/// dimension followed by the fixed spatial extents of a single entry.
///
/// \tparam spatialDimensions Rank of a single entry.
/// \tparam ctype             Element type stored in the dataset.
/// \tparam T                 Index/extent type (defaults to hsize_t).
template <int spatialDimensions, typename ctype = double, typename T = hsize_t>
class HDF5SequenceIO {
  int static const dimensions = 1 + spatialDimensions;

public:
  /// \brief Open (or create) the dataset \p datasetname in \p file.
  ///
  /// \param file        Group/file wrapper the dataset lives in (not owned).
  /// \param datasetname Name of the dataset.
  /// \param args        Exactly spatialDimensions extents of a single entry.
  /// \throws Dune::Exception if an existing dataset has mismatching
  ///         rank/extents or element datatype.
  template <typename... Args>
  HDF5SequenceIO(HDF5Grouplike &file, std::string datasetname, Args... args)
      : file_(file), capacity_{ { T(args)... } } {
    static_assert(sizeof...(args) == spatialDimensions,
                  "wrong number of arguments");
    if (file_.hasDataset(datasetname)) {
      dset_ = file_.openDataset(datasetname);
      if (not validDatasetDimensions() or not validDatatype())
        DUNE_THROW(Dune::Exception, "Type/Size mismatch in dataset.");
    } else {
      // Start with zero time steps; allow unlimited growth along dim 0.
      auto const initial_dims = maxExtent(0);
      auto const max_dims = maxExtent(H5S_UNLIMITED);
      hid_t file_space =
          H5Screate_simple(dimensions, initial_dims.data(), max_dims.data());
      hid_t plist = H5Pcreate(H5P_DATASET_CREATE);
      // Chunked layout is mandatory for datasets with unlimited dimensions.
      H5Pset_layout(plist, H5D_CHUNKED);
      auto const chunk_dims = maxExtent(1); // one chunk per time step
      H5Pset_chunk(plist, dimensions, chunk_dims.data());
      dset_ = file_.createDataset(datasetname, file_space, plist,
                                  HDF5TypeTraits<ctype>::getType());
      H5Pclose(plist);
      H5Sclose(file_space);
    }
  }

  // This class owns dset_ and closes it in the destructor; copying would
  // close the same handle twice, so copies are forbidden.
  HDF5SequenceIO(HDF5SequenceIO const &) = delete;
  HDF5SequenceIO &operator=(HDF5SequenceIO const &) = delete;

  ~HDF5SequenceIO() { H5Dclose(dset_); }

  /// \brief Write one entry at \p timeStep, growing the dataset along the
  ///        time axis if necessary.
  ///
  /// \param timeStep Index along the unlimited first dimension.
  /// \param buffer   Flattened entry; must hold exactly the product of the
  ///                 spatial extents passed to the constructor.
  void add(size_t timeStep, std::vector<ctype> const &buffer) {
    assert(buffer.size() == std::accumulate(capacity_.begin(), capacity_.end(),
                                            T(1), std::multiplies<T>()));
    hid_t file_space = H5Dget_space(dset_);
    std::array<T, dimensions> dims;
    H5Sget_simple_extent_dims(file_space, dims.data(), NULL);
    if (timeStep >= dims[0]) {
      dims[0] = timeStep + 1;
      H5Dset_extent(dset_, dims.data());
      // H5Dset_extent invalidates the file space
      H5Sclose(file_space);
      file_space = H5Dget_space(dset_);
    }
    auto const start = minExtent(timeStep);
    auto const count = maxExtent(1);
    H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start.data(), NULL,
                        count.data(), NULL);
    hid_t mem_space = H5Screate_simple(dimensions, count.data(), NULL);
    H5Dwrite(dset_, HDF5TypeTraits<ctype>::getType(), mem_space, file_space,
             H5P_DEFAULT, buffer.data());
    H5Sclose(mem_space);
    H5Sclose(file_space);
  }

  /// \brief Read the entry stored at \p timeStep.
  ///
  /// \param timeStep Index along the unlimited first dimension.
  /// \param buffer   Resized and filled with the flattened entry.
  /// \param capacity Receives the spatial extents of a single entry.
  /// \throws Dune::Exception if \p timeStep lies beyond the current extent.
  void read(size_t timeStep, std::vector<ctype> &buffer,
            std::array<T, spatialDimensions> &capacity) {
    hid_t file_space = H5Dget_space(dset_);
    std::array<T, dimensions> dims;
    H5Sget_simple_extent_dims(file_space, dims.data(), NULL);
    if (timeStep >= dims[0]) {
      // Close the handle before throwing; the old code leaked it here.
      H5Sclose(file_space);
      DUNE_THROW(Dune::Exception, "Reading non-existent entry.");
    }
    buffer.resize(std::accumulate(capacity_.begin(), capacity_.end(), T(1),
                                  std::multiplies<T>()));
    auto const start = minExtent(timeStep);
    auto const count = maxExtent(1);
    H5Sselect_hyperslab(file_space, H5S_SELECT_SET, start.data(), NULL,
                        count.data(), NULL);
    hid_t mem_space = H5Screate_simple(dimensions, count.data(), NULL);
    H5Dread(dset_, HDF5TypeTraits<ctype>::getType(), mem_space, file_space,
            H5P_DEFAULT, buffer.data());
    H5Sclose(mem_space);
    H5Sclose(file_space);
    capacity = capacity_;
  }

private:
  // True iff the on-disk dataset has rank 1 + spatialDimensions and its
  // spatial extents match capacity_ (the time extent may be anything).
  bool validDatasetDimensions() {
    hid_t file_space = H5Dget_space(dset_);
    // Check the rank first: reading the extents of a higher-rank dataset
    // into a fixed-size array would overflow it.
    bool ret = H5Sget_simple_extent_ndims(file_space) == dimensions;
    if (ret) {
      std::array<T, dimensions> dims;
      H5Sget_simple_extent_dims(file_space, dims.data(), NULL);
      for (size_t i = 0; i < spatialDimensions; ++i)
        if (capacity_[i] != dims[i + 1]) { // dims[0] is the time extent
          ret = false;
          break;
        }
    }
    H5Sclose(file_space);
    return ret;
  }

  // True iff the on-disk element type matches ctype.
  bool validDatatype() {
    // H5Dget_type returns a new handle that must be released with H5Tclose;
    // the old code leaked it.
    hid_t dtype = H5Dget_type(dset_);
    bool const ret = H5Tequal(dtype, HDF5TypeTraits<ctype>::getType()) > 0;
    H5Tclose(dtype);
    return ret;
  }

  // Hyperslab start offset of entry `timeSteps`: (timeSteps, 0, ..., 0).
  std::array<T, dimensions> minExtent(T timeSteps) {
    std::array<T, dimensions> ret;
    ret[0] = timeSteps;
    std::fill(ret.begin() + 1, ret.end(), 0);
    return ret;
  }

  // Full extent with `timeSteps` entries: (timeSteps, capacity_...).
  std::array<T, dimensions> maxExtent(T timeSteps) {
    std::array<T, dimensions> ret;
    ret[0] = timeSteps;
    std::copy(capacity_.begin(), capacity_.end(), ret.begin() + 1);
    return ret;
  }

  HDF5Grouplike &file_;                       // not owned
  std::array<T, spatialDimensions> capacity_; // extents of a single entry
  hid_t dset_;                                // owned; closed in the destructor
};
/// \brief Serialize \p data into a flat buffer and store it at \p timeStep
///        via the given writer.
template <int spatialDimensions, typename ctype, typename T, class Data>
void addEntry(HDF5SequenceIO<spatialDimensions, ctype, T> &writer,
              size_t timeStep, Data const &data) {
  std::vector<ctype> flattened;
  toBuffer(data, flattened);
  writer.add(timeStep, flattened);
}
/// \brief Fetch the flat buffer stored at \p timeStep via the given reader
///        and deserialize it into \p data.
template <int spatialDimensions, typename ctype, typename T, class Data>
void readEntry(HDF5SequenceIO<spatialDimensions, ctype, T> &reader,
               size_t timeStep, Data &data) {
  std::vector<ctype> flattened;
  std::array<T, spatialDimensions> extents;
  reader.read(timeStep, flattened, extents);
  fromBuffer(flattened, extents, data);
}
#endif
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment