// LibArchive-backed path: decode the current archive entry into `content`.
// T, Endianness and BufferSize are template parameters declared outside this
// fragment -- TODO confirm against the enclosing signature.
69#ifdef STORM_HAVE_LIBARCHIVE
// Storage word ("bucket") type used by storm's BitVector.
70 using BucketType =
decltype(std::declval<storm::storage::BitVector&>().getBucket({}));
// bool payloads are decoded as packed bit buckets rather than single bytes.
71 constexpr bool IsBitVector = std::is_same_v<T, bool>;
72 using DataType = std::conditional_t<IsBitVector, BucketType, T>;
// When the requested byte order matches the host, no byte swap is needed.
73 constexpr bool NativeEndianness = Endianness == std::endian::native;
// An entry must have been positioned before reading from it.
74 STORM_LOG_THROW(_currentEntry, storm::exceptions::FileIoException,
"No valid entry loaded.");
// Declared entry size; clamp to zero after checking the result code, since
// libarchive size queries can yield negative values.
78 auto entrySize = archive_entry_size(_currentEntry);
79 checkResult(_archive, entrySize);
80 entrySize = std::max<decltype(entrySize)>(entrySize, 0);
// The payload must decompose into whole DataType values.
81 STORM_LOG_THROW((entrySize %
sizeof(DataType) == 0), storm::exceptions::FileIoException,
82 "Archive entry '" << name() <<
"' can not be extracted as vector of a " <<
sizeof(DataType) <<
"-bytes type: File size " << entrySize
83 <<
" bytes is not a multiple of " <<
sizeof(DataType) <<
" bytes.");
// Pre-size the output: bits for the BitVector case, values otherwise.
84 if constexpr (IsBitVector) {
85 content.resize(entrySize * 8);
88 content.reserve(entrySize /
sizeof(DataType));
// Buckets appended so far; only meaningful in the BitVector case.
91 [[maybe_unused]] uint64_t bucketCount = 0;
// Appends a range of decoded DataType values to `content`.
93 auto append = [&content, &bucketCount](std::ranges::input_range
auto&& data) {
94 if constexpr (IsBitVector) {
// Make room for the incoming buckets before writing their bits.
95 content.grow(bucketCount + data.size() *
sizeof(BucketType) * 8);
96 for (
auto bits : data) {
105 content.insert(content.end(), data.begin(), data.end());
// Fixed staging buffer; must hold a whole number of DataType values so that
// only the final read can leave a partial value behind.
109 static_assert(BufferSize %
sizeof(DataType) == 0,
"Buffer size should be a multiple of sizeof(DataType).");
110 std::array<char, BufferSize> buffer;
111 auto bytesRead = archive_read_data(_archive, buffer.data(), BufferSize);
112 checkResult(_archive, bytesRead);
// Whole values available in the buffer after this read.
115 uint64_t
const numValues = bytesRead /
sizeof(DataType);
// Fast path: host byte order (or 1-byte values) needs no swapping.
116 if constexpr (NativeEndianness ||
sizeof(DataType) == 1) {
117 append(std::span<const DataType>(
reinterpret_cast<const DataType*
>(buffer.data()), numValues));
// Otherwise byte-swap each value lazily while appending.
119 append(std::span<const DataType>(
reinterpret_cast<const DataType*
>(buffer.data()), numValues) |
120 std::ranges::views::transform(storm::utility::byteSwap<DataType>));
// Bytes of a partially read trailing value, carried into the next read.
124 uint64_t offsetBytes = bytesRead %
sizeof(DataType);
125 if (offsetBytes > 0 && numValues > 0) {
128 STORM_LOG_ASSERT(
static_cast<uint64_t
>(bytesRead) == numValues *
sizeof(DataType) + offsetBytes,
130 STORM_LOG_ASSERT(
static_cast<uint64_t
>(bytesRead - offsetBytes) > offsetBytes,
// Move the incomplete tail to the front of the buffer.
132 std::copy(buffer.data() + bytesRead - offsetBytes, buffer.data() + bytesRead, buffer.data());
// Refill the buffer after the carried-over tail bytes.
134 bytesRead = archive_read_data(_archive, buffer.data() + offsetBytes, BufferSize - offsetBytes);
135 checkResult(_archive, bytesRead);
136 if (bytesRead == 0) {
// End of entry: leftover tail bytes mean the data did not decompose into
// whole values, i.e. the entry is truncated or mis-sized.
138 offsetBytes == 0, storm::exceptions::FileIoException,
139 "Archive entry could not be extracted as vector of a " <<
sizeof(DataType) <<
"-bytes type: " << offsetBytes <<
" bytes left in the buffer.");
// Account for the carried-over bytes in the freshly read chunk.
142 bytesRead += offsetBytes;
// Trim the output to what was actually decoded.
146 if constexpr (IsBitVector) {
147 content.resize(bucketCount *
sizeof(BucketType) * 8);
149 content.shrink_to_fit();
// Without LibArchive support this operation is unavailable.
156 STORM_LOG_THROW(
false, storm::exceptions::MissingLibraryException,
"Reading archives is not supported. Storm is compiled without LibArchive.");
// LibArchive-backed path: read the whole current entry into the string `content`.
161#ifdef STORM_HAVE_LIBARCHIVE
// Reserve the declared entry size (clamped to zero) to avoid reallocations.
164 auto const entrySize = archive_entry_size(_currentEntry);
165 checkResult(_archive, entrySize);
166 content.reserve(std::max<
decltype(entrySize)>(entrySize, 0));
// Stream the entry in BufferSize chunks until archive_read_data returns <= 0.
168 std::array<char, BufferSize> buffer;
169 la_ssize_t bytesRead = 0;
170 while ((bytesRead = archive_read_data(_archive, buffer.data(), BufferSize)) > 0) {
171 content.append(buffer.data(), bytesRead);
// A negative result signals a read error; surface libarchive's message.
173 STORM_LOG_THROW(bytesRead >= 0, storm::exceptions::FileIoException,
"Failed to read data from archive. " << archive_error_string(_archive) <<
".");
// Drop any unused capacity left from the up-front reservation.
174 content.shrink_to_fit();
// Without LibArchive support this operation is unavailable.
180 STORM_LOG_THROW(
false, storm::exceptions::MissingLibraryException,
"Reading archives is not supported. Storm is compiled without LibArchive.");
// Entry iterator: allocates a fresh libarchive read handle owned through
// ArchiveDeleter (RAII via the smart-pointer member -- TODO confirm member
// type); no entry is loaded yet, so _currentEntry starts as nullptr.
185ArchiveReader::Iterator::Iterator(std::filesystem::path
const& filename) : _archive(archive_read_new(), ArchiveDeleter{}), _currentEntry(nullptr) {
// Heuristic check whether `file` is a readable archive; only regular files qualify.
240 if (!std::filesystem::is_regular_file(file)) {
244#ifdef STORM_HAVE_LIBARCHIVE
// With LibArchive available: actually open the file (all filters/formats
// enabled) and try to read the first header; accept OK, EOF (empty archive)
// and WARN results.
246 struct archive* a = archive_read_new();
250 archive_read_support_filter_all(a);
251 archive_read_support_format_all(a);
252 if (archive_read_open_filename(a, file.string().c_str(), 10240) != ARCHIVE_OK) {
253 archive_read_free(a);
256 struct archive_entry* entry =
nullptr;
257 auto const r = archive_read_next_header(a, &entry);
258 bool const result = (r == ARCHIVE_OK || r == ARCHIVE_EOF || r == ARCHIVE_WARN);
259 archive_read_free(a);
// Presumably the non-LibArchive fallback (the #else is outside this view --
// confirm): sniff well-known magic bytes from the start of the file.
263 std::ifstream in(file, std::ios::binary);
// Read up to 512 bytes; enough to cover the tar magic at offset 257.
268 std::array<unsigned char, 512> buf;
269 in.read(
reinterpret_cast<char*
>(buf.data()), buf.size());
270 std::streamsize
const n = in.gcount();
// View over only the bytes actually read (the file may be shorter than 512).
274 std::span<unsigned char> bytes(buf.data(),
static_cast<std::size_t
>(n));
// True when `bytes` contains `magic` starting at `offset`.
275 auto starts_with_bytes = [&bytes](std::initializer_list<unsigned char> magic, std::size_t offset = 0) ->
bool {
276 return bytes.size() >= magic.size() + offset && std::equal(magic.begin(), magic.end(), bytes.begin() + offset);
// Known signatures: gzip (1F 8B), xz (FD "7zXZ" 00), tar ("ustar" at offset
// 257) and the three zip local-header variants (regular, empty, spanned).
280 std::initializer_list<unsigned char>
const gz{0x1F, 0x8B},
281 xz{0xFD,
'7',
'z',
'X',
'Z', 0x00},
282 tar{
'u',
's',
't',
'a',
'r'},
283 zip{0x50, 0x4B, 0x03, 0x04}, zipEmpty{0x50, 0x4B, 0x05, 0x06}, zipSpanned{0x50, 0x4B, 0x07, 0x08};
284 return starts_with_bytes(gz) || starts_with_bytes(xz) || starts_with_bytes(tar, 257) || starts_with_bytes(zip) || starts_with_bytes(zipEmpty) ||
285 starts_with_bytes(zipSpanned);