diff --git a/src/libstore-tests/nar-info.cc b/src/libstore-tests/nar-info.cc index e71575d2d44..eec5be96092 100644 --- a/src/libstore-tests/nar-info.cc +++ b/src/libstore-tests/nar-info.cc @@ -4,6 +4,7 @@ #include "nix/store/path-info.hh" #include "nix/store/nar-info.hh" +#include "nix/util/compression-algo.hh" #include "nix/util/tests/characterization.hh" #include "nix/store/tests/libstore.hh" @@ -65,7 +66,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) }; info.url = "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz"; - info.compression = "xz"; + info.compression = CompressionAlgo::xz; info.fileHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="); info.fileSize = 4029176; } diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 61480ec1c78..4c57584238d 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -165,7 +165,7 @@ ref BinaryCacheStore::addToStoreCommon( auto info = mkInfo(narHashSink.finish()); auto narInfo = make_ref(info); - narInfo->compression = config.compression.to_string(); // FIXME: Make NarInfo use CompressionAlgo + narInfo->compression = config.compression; auto [fileHash, fileSize] = fileHashSink.finish(); narInfo->fileHash = fileHash; narInfo->fileSize = fileSize; diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index cab11f7a14b..62168fb94c4 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -3,6 +3,7 @@ #include "nix/store/store-api.hh" #include "nix/store/globals.hh" #include "nix/util/archive.hh" +#include "nix/util/compression-algo.hh" #include "nix/util/compression.hh" #include "nix/util/file-system.hh" @@ -54,7 +55,8 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx) } #endif - auto decompressor = makeDecompressionSink(unpack && hasSuffix(mainUrl, ".xz") ? 
"xz" : "none", sink); + auto decompressor = makeDecompressionSink( + unpack && hasSuffix(mainUrl, ".xz") ? CompressionAlgo::xz : CompressionAlgo::none, sink); fileTransfer->download(std::move(request), *decompressor); decompressor->finish(); }); diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index c55f0cb42ca..6e76fe07c6c 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -1,5 +1,6 @@ #include "nix/store/filetransfer.hh" #include "nix/store/globals.hh" +#include "nix/util/compression-algo.hh" #include "nix/util/config-global.hh" #include "nix/store/store-api.hh" #include "nix/util/compression.hh" @@ -106,7 +107,7 @@ struct curlFileTransfer : public FileTransfer curlSList requestHeaders; - std::string encoding; + std::optional encoding; bool acceptRanges = false; @@ -288,7 +289,7 @@ struct curlFileTransfer : public FileTransfer result.bodySize = 0; statusMsg = trim(match.str(1)); acceptRanges = false; - encoding = ""; + encoding = std::nullopt; appendCurrentUrl(); } else { @@ -312,7 +313,7 @@ struct curlFileTransfer : public FileTransfer } else if (name == "content-encoding") - encoding = trim(line.substr(i + 1)); + encoding = parseCompressionAlgo(trim(line.substr(i + 1))); else if (name == "accept-ranges" && toLower(trim(line.substr(i + 1))) == "bytes") acceptRanges = true; @@ -738,7 +739,7 @@ struct curlFileTransfer : public FileTransfer sink, we can only retry if the server supports ranged requests. 
*/ if (err == Transient && attempt < request.tries - && (!this->request.dataCallback || writtenToSink == 0 || (acceptRanges && encoding.empty()))) { + && (!this->request.dataCallback || writtenToSink == 0 || (acceptRanges && !encoding.has_value()))) { int ms = retryTimeMs * std::pow( 2.0f, attempt - 1 + std::uniform_real_distribution<>(0.0, 0.5)(fileTransfer.mt19937)); diff --git a/src/libstore/include/nix/store/nar-info.hh b/src/libstore/include/nix/store/nar-info.hh index 7403dc1d445..3ab3c6576e9 100644 --- a/src/libstore/include/nix/store/nar-info.hh +++ b/src/libstore/include/nix/store/nar-info.hh @@ -13,7 +13,8 @@ struct StoreDirConfig; struct UnkeyedNarInfo : virtual UnkeyedValidPathInfo { std::string url; - std::string compression; // FIXME: Use CompressionAlgo + + std::optional compression; std::optional fileHash; uint64_t fileSize = 0; diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index ce35bab3e3e..9b71c815f3a 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -1,4 +1,5 @@ #include "nix/util/archive.hh" +#include "nix/util/compression-algo.hh" #include "nix/util/posix-source-accessor.hh" #include "nix/store/store-api.hh" #include "nix/store/local-fs-store.hh" @@ -130,7 +131,7 @@ std::optional LocalFSStore::getBuildLogExact(const StorePath & path else if (pathExists(logBz2Path)) { try { - return decompress("bzip2", readFile(logBz2Path)); + return decompress(CompressionAlgo::bzip2, readFile(logBz2Path)); } catch (Error &) { } } diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index 18932ca1250..cc71dc561e1 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -1,4 +1,5 @@ #include "nix/store/nar-info-disk-cache.hh" +#include "nix/util/compression-algo.hh" #include "nix/util/users.hh" #include "nix/util/sync.hh" #include "nix/store/sqlite.hh" @@ -269,7 +270,7 @@ struct NarInfoDiskCacheImpl : NarInfoDiskCache auto 
narInfo = make_ref( cache.storeDir, StorePath(hashPart + "-" + namePart), Hash::parseAnyPrefixed(queryNAR.getStr(6))); narInfo->url = queryNAR.getStr(2); - narInfo->compression = queryNAR.getStr(3); + narInfo->compression = parseCompressionAlgo(queryNAR.getStr(3)); if (!queryNAR.isNull(4)) narInfo->fileHash = Hash::parseAnyPrefixed(queryNAR.getStr(4)); narInfo->fileSize = queryNAR.getInt(5); @@ -334,7 +335,8 @@ struct NarInfoDiskCacheImpl : NarInfoDiskCache state->insertNAR .use()(cache.id)(hashPart) (std::string(info->path.name()))( - narInfo ? narInfo->url : "", narInfo != 0)(narInfo ? narInfo->compression : "", narInfo != 0)( + narInfo ? narInfo->url : "", + narInfo != 0)(narInfo ? showCompressionAlgo(narInfo->compression.value()) : "", narInfo != 0)( narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash)( narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)(info->narHash.to_string( diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index b912467d8a8..f157d6d0b2e 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -1,8 +1,10 @@ #include "nix/store/globals.hh" #include "nix/store/nar-info.hh" #include "nix/store/store-api.hh" +#include "nix/util/compression-algo.hh" #include "nix/util/strings.hh" #include "nix/util/json-utils.hh" +#include namespace nix { @@ -52,7 +54,7 @@ NarInfo::NarInfo(const StoreDirConfig & store, const std::string & s, const std: } else if (name == "URL") url = value; else if (name == "Compression") - compression = value; + compression = value.empty() ? 
std::nullopt : std::make_optional(parseCompressionAlgo(value)); else if (name == "FileHash") fileHash = parseHashField(value); else if (name == "FileSize") { @@ -90,8 +92,8 @@ NarInfo::NarInfo(const StoreDirConfig & store, const std::string & s, const std: line += 1; } - if (compression == "") - compression = "bzip2"; + if (!compression.has_value()) + compression = CompressionAlgo::bzip2; if (!havePath || !haveNarHash || url.empty() || narSize == 0) { line = 0; // don't include line information in the error @@ -109,8 +111,8 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const std::string res; res += "StorePath: " + store.printStorePath(path) + "\n"; res += "URL: " + url + "\n"; - assert(compression != ""); - res += "Compression: " + compression + "\n"; + assert(compression.has_value()); + res += "Compression: " + showCompressionAlgo(compression.value()) + "\n"; assert(fileHash && fileHash->algo == HashAlgorithm::SHA256); res += "FileHash: " + fileHash->to_string(HashFormat::Nix32, true) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n"; @@ -142,8 +144,8 @@ UnkeyedNarInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo, Pat if (includeImpureInfo) { if (!url.empty()) jsonObject["url"] = url; - if (!compression.empty()) - jsonObject["compression"] = compression; + if (compression.has_value()) + jsonObject["compression"] = showCompressionAlgo(compression.value()); if (fileHash) { if (format == PathInfoJsonFormat::V1) jsonObject["downloadHash"] = fileHash->to_string(HashFormat::SRI, true); @@ -170,8 +172,11 @@ UnkeyedNarInfo UnkeyedNarInfo::fromJSON(const StoreDirConfig * store, const nloh if (auto * url = get(obj, "url")) res.url = getString(*url); - if (auto * compression = get(obj, "compression")) - res.compression = getString(*compression); + if (auto * compression = get(obj, "compression")) { + auto compression_value = getString(*compression); + res.compression = + compression_value.empty() ? 
std::nullopt : std::make_optional(parseCompressionAlgo(compression_value)); + } if (auto * downloadHash = get(obj, "downloadHash")) { if (format == PathInfoJsonFormat::V1) diff --git a/src/libutil-tests/compression.cc b/src/libutil-tests/compression.cc index 53d476fa859..11850301894 100644 --- a/src/libutil-tests/compression.cc +++ b/src/libutil-tests/compression.cc @@ -16,7 +16,8 @@ TEST(compress, noneMethodDoesNothingToTheInput) TEST(decompress, decompressNoneCompressed) { - auto method = "none"; + + auto method = CompressionAlgo::none; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; auto o = decompress(method, str); @@ -27,7 +28,7 @@ TEST(decompress, decompressEmptyCompressed) { // Empty-method decompression used e.g. by S3 store // (Content-Encoding == ""). - auto method = ""; + auto method = CompressionAlgo::none; // Do we handle this in S3 store??? auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; auto o = decompress(method, str); @@ -36,7 +37,7 @@ TEST(decompress, decompressEmptyCompressed) TEST(decompress, decompressXzCompressed) { - auto method = "xz"; + auto method = CompressionAlgo::xz; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; auto o = decompress(method, compress(CompressionAlgo::xz, str)); @@ -45,7 +46,7 @@ TEST(decompress, decompressXzCompressed) TEST(decompress, decompressBzip2Compressed) { - auto method = "bzip2"; + auto method = CompressionAlgo::bzip2; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; auto o = decompress(method, compress(CompressionAlgo::bzip2, str)); @@ -54,7 +55,7 @@ TEST(decompress, decompressBzip2Compressed) TEST(decompress, decompressBrCompressed) { - auto method = "br"; + auto method = CompressionAlgo::brotli; auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; auto o = decompress(method, compress(CompressionAlgo::brotli, str)); @@ -63,7 +64,7 @@ TEST(decompress, decompressBrCompressed) TEST(decompress, 
decompressInvalidInputThrowsCompressionError) { - auto method = "bzip2"; + auto method = CompressionAlgo::bzip2; auto str = "this is a string that does not qualify as valid bzip2 data"; ASSERT_THROW(decompress(method, str), CompressionError); @@ -88,7 +89,7 @@ TEST(makeCompressionSink, compressAndDecompress) { StringSink strSink; auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf"; - auto decompressionSink = makeDecompressionSink("bzip2", strSink); + auto decompressionSink = makeDecompressionSink(CompressionAlgo::bzip2, strSink); auto sink = makeCompressionSink(CompressionAlgo::bzip2, *decompressionSink); (*sink)(inputString); diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc index 8b55c44a2db..3928eeeb9bd 100644 --- a/src/libutil/compression.cc +++ b/src/libutil/compression.cc @@ -1,4 +1,5 @@ #include "nix/util/compression.hh" +#include "nix/util/compression-algo.hh" #include "nix/util/signals.hh" #include "nix/util/tarfile.hh" #include "nix/util/finally.hh" @@ -38,9 +39,9 @@ struct ArchiveDecompressionSource : Source { std::unique_ptr archive = 0; Source & src; - std::optional compressionMethod; + std::optional compressionMethod; - ArchiveDecompressionSource(Source & src, std::optional compressionMethod = std::nullopt) + ArchiveDecompressionSource(Source & src, std::optional compressionMethod = std::nullopt) : src(src) , compressionMethod(std::move(compressionMethod)) { @@ -239,7 +240,7 @@ struct BrotliDecompressionSink : ChunkedCompressionSink } }; -std::string decompress(const std::string & method, std::string_view in) +std::string decompress(const std::optional & method, std::string_view in) { StringSink ssink; auto sink = makeDecompressionSink(method, ssink); @@ -248,11 +249,11 @@ std::string decompress(const std::string & method, std::string_view in) return std::move(ssink.s); } -std::unique_ptr makeDecompressionSink(const std::string & method, Sink & nextSink) +std::unique_ptr makeDecompressionSink(const 
std::optional & method, Sink & nextSink) { - if (method == "none" || method == "" || method == "identity") + if (!method.has_value() || method == CompressionAlgo::none) return std::make_unique(nextSink); - else if (method == "br") + else if (method == CompressionAlgo::brotli) return std::make_unique(nextSink); else return sourceToSink([method, &nextSink](Source & source) { diff --git a/src/libutil/include/nix/util/compression-algo.hh b/src/libutil/include/nix/util/compression-algo.hh index fe1fc57596c..434655dc944 100644 --- a/src/libutil/include/nix/util/compression-algo.hh +++ b/src/libutil/include/nix/util/compression-algo.hh @@ -7,6 +7,7 @@ namespace nix { +// Do we want to add Identity to the list??? #define NIX_FOR_EACH_COMPRESSION_ALGO(MACRO) \ MACRO("none", none) \ MACRO("br", brotli) \ diff --git a/src/libutil/include/nix/util/compression.hh b/src/libutil/include/nix/util/compression.hh index db49c8dfd6c..75b17f223ed 100644 --- a/src/libutil/include/nix/util/compression.hh +++ b/src/libutil/include/nix/util/compression.hh @@ -6,6 +6,7 @@ #include "nix/util/serialise.hh" #include "nix/util/compression-algo.hh" +#include #include namespace nix { @@ -17,9 +18,9 @@ struct CompressionSink : BufferedSink, FinishSink using FinishSink::finish; }; -std::string decompress(const std::string & method, std::string_view in); +std::string decompress(const std::optional & method, std::string_view in); -std::unique_ptr makeDecompressionSink(const std::string & method, Sink & nextSink); +std::unique_ptr makeDecompressionSink(const std::optional & method, Sink & nextSink); std::string compress(CompressionAlgo method, std::string_view in, const bool parallel = false, int level = -1); diff --git a/src/libutil/include/nix/util/tarfile.hh b/src/libutil/include/nix/util/tarfile.hh index 324c7c8a8a9..47d58d959e8 100644 --- a/src/libutil/include/nix/util/tarfile.hh +++ b/src/libutil/include/nix/util/tarfile.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include 
"nix/util/compression-algo.hh" #include "nix/util/serialise.hh" #include "nix/util/fs-sink.hh" #include @@ -22,7 +23,7 @@ struct TarArchive /// @param raw - Whether to enable raw file support. For more info look in docs: /// https://manpages.debian.org/stretch/libarchive-dev/archive_read_format.3.en.html /// @param compression_method - Primary compression method to use. std::nullopt means 'all'. - TarArchive(Source & source, bool raw = false, std::optional compression_method = std::nullopt); + TarArchive(Source & source, bool raw = false, std::optional compression_method = std::nullopt); /// Disable copy constructor. Explicitly default move assignment/constructor. TarArchive(const TarArchive &) = delete; diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index eea03766375..7e9b6771dfb 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -1,6 +1,8 @@ #include #include +#include +#include "nix/util/compression-algo.hh" #include "nix/util/finally.hh" #include "nix/util/serialise.hh" #include "nix/util/tarfile.hh" @@ -57,11 +59,12 @@ void TarArchive::check(int err, const std::string & reason) /// Instead it's necessary to use this kludge to convert method -> code and /// then use archive_read_support_filter_by_code. Arguably this is better than /// hand-rolling the equivalent function that is better implemented in libarchive. 
-int getArchiveFilterCodeByName(const std::string & method)
+int getArchiveFilterCodeByName(const std::optional<CompressionAlgo> & method)
 {
     auto * ar = archive_write_new();
     auto cleanup = Finally{[&ar]() { checkLibArchive(ar, archive_write_close(ar), "failed to close archive: %s"); }};
-    auto err = archive_write_add_filter_by_name(ar, method.c_str());
+    auto err = archive_write_add_filter_by_name(
+        ar, showCompressionAlgo(method.value()).c_str()); // method.value_or(CompressionAlgo::none)
     checkLibArchive(ar, err, "failed to get libarchive filter by name: %s");
     auto code = archive_filter_code(ar, 0);
     return code;
 }
@@ -78,7 +81,7 @@ static void enableSupportedFormats(struct archive * archive)
     archive_read_support_format_empty(archive);
 }
 
-TarArchive::TarArchive(Source & source, bool raw, std::optional<std::string> compression_method)
+TarArchive::TarArchive(Source & source, bool raw, std::optional<CompressionAlgo> compression_method)
     : archive{archive_read_new()}
     , source{&source}
     , buffer(defaultBufferSize)