Merge branch 'master' into overlayfs-store

John Ericson 2024-03-18 16:41:29 -04:00
commit 18945e3f44
131 changed files with 1958 additions and 4473 deletions


@@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v25
+ - uses: cachix/install-nix-action@v26
with:
# The sandbox would otherwise be disabled by default on Darwin
extra_nix_config: "sandbox = true"
@@ -62,7 +62,7 @@ jobs:
with:
fetch-depth: 0
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v25
+ - uses: cachix/install-nix-action@v26
with:
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
- uses: cachix/cachix-action@v14
@@ -84,7 +84,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
- - uses: cachix/install-nix-action@v25
+ - uses: cachix/install-nix-action@v26
with:
install_url: '${{needs.installer.outputs.installerURL}}'
install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
@@ -114,7 +114,7 @@ jobs:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- - uses: cachix/install-nix-action@v25
+ - uses: cachix/install-nix-action@v26
with:
install_url: https://releases.nixos.org/nix/nix-2.20.3/install
- run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
@@ -157,4 +157,5 @@ jobs:
docker push $IMAGE_ID:$NIX_VERSION
docker push $IMAGE_ID:latest
# deprecated 2024-02-24
+ docker tag nix:$NIX_VERSION $IMAGE_ID:master
docker push $IMAGE_ID:master

.gitignore (1 line changed)

@@ -10,6 +10,7 @@ perl/Makefile.config
/stamp-h1
/svn-revision
/libtool
+ /config/config.*
# /doc/manual/
/doc/manual/*.1

@@ -1 +1 @@
- 2.21.0
+ 2.22.0

@@ -67,7 +67,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
- [ ] API documentation in header files
- [ ] Code and comments are self-explanatory
- [ ] Commit message explains **why** the change was made
- - [ ] New feature or incompatible change: updated [release notes](./doc/manual/src/release-notes/rl-next.md)
+ - [ ] New feature or incompatible change: [add a release note](https://nixos.org/manual/nix/stable/contributing/hacking#add-a-release-note)
7. If you need additional feedback or help to getting pull request into shape, ask other contributors using [@mentions](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams).

config/config.guess (vendored, 1700 lines) and config/config.sub (vendored, 1860 lines): file diffs suppressed because they are too large.

@@ -14,7 +14,7 @@
const redirects = {
"index.html": {
- "part-advanced-topics": "advanced-topics/advanced-topics.html",
+ "part-advanced-topics": "advanced-topics/index.html",
"chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
"chap-diff-hook": "advanced-topics/diff-hook.html",
"check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
@@ -22,7 +22,7 @@ const redirects = {
"chap-post-build-hook": "advanced-topics/post-build-hook.html",
"chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
"chap-writing-nix-expressions": "language/index.html",
- "part-command-ref": "command-ref/command-ref.html",
+ "part-command-ref": "command-ref/index.html",
"conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
"conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
"conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
@@ -261,7 +261,7 @@ const redirects = {
"sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
"sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
"sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
- "chap-installation": "installation/installation.html",
+ "chap-installation": "installation/index.html",
"ch-installing-binary": "installation/installing-binary.html",
"sect-macos-installation": "installation/installing-binary.html#macos-installation",
"sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
@@ -288,7 +288,7 @@ const redirects = {
"ssec-copy-closure": "package-management/copy-closure.html",
"sec-garbage-collection": "package-management/garbage-collection.html",
"ssec-gc-roots": "package-management/garbage-collector-roots.html",
- "chap-package-management": "package-management/package-management.html",
+ "chap-package-management": "package-management/index.html",
"sec-profiles": "package-management/profiles.html",
"ssec-s3-substituter": "package-management/s3-substituter.html",
"ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
@@ -297,7 +297,7 @@ const redirects = {
"sec-sharing-packages": "package-management/sharing-packages.html",
"ssec-ssh-substituter": "package-management/ssh-substituter.html",
"chap-quick-start": "quick-start.html",
- "sec-relnotes": "release-notes/release-notes.html",
+ "sec-relnotes": "release-notes/index.html",
"ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
"ch-relnotes-0.10": "release-notes/rl-0.10.html",
"ssec-relnotes-0.11": "release-notes/rl-0.11.html",

@@ -1,40 +0,0 @@
---
synopsis: Concise error printing in `nix repl`
prs: 9928
---
Previously, if an element of a list or attribute set threw an error while
evaluating, `nix repl` would print the entire error (including source location
information) inline. This output was clumsy and difficult to parse:
```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error:
… while calling the 'throw' builtin
at «string»:1:9:
1| { err = builtins.throw "uh oh!"; }
| ^
error: uh oh!»; }
```
Now, only the error message is displayed, making the output much more readable.
```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error: uh oh!»; }
```
However, if the whole expression being evaluated throws an error, source
locations and (if applicable) a stack trace are printed, just like you'd expect:
```
nix-repl> builtins.throw "uh oh!"
error:
… while calling the 'throw' builtin
at «string»:1:1:
1| builtins.throw "uh oh!"
| ^
error: uh oh!
```

@@ -1,9 +0,0 @@
---
synopsis: "`--debugger` can now access bindings from `let` expressions"
prs: 9918
issues: 8827
---
Breakpoints and errors in the bindings of a `let` expression can now access
those bindings in the debugger. Previously, only the body of `let` expressions
could access those bindings.

@@ -1,9 +0,0 @@
---
synopsis: Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set
prs: 9914
---
If the `debugger-on-trace` option is set and `--debugger` is given,
`builtins.trace` calls will behave similarly to `builtins.break` and will enter
the debug REPL. This is useful for determining where warnings are being emitted
from.

@@ -1,25 +0,0 @@
---
synopsis: Debugger prints source position information
prs: 9913
---
The `--debugger` now prints source location information, instead of the
pointers of source location information. Before:
```
nix-repl> :bt
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
0x600001522598
```
After:
```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
131|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
| ^
133| in
```

@@ -1,25 +0,0 @@
---
synopsis: The `--debugger` will start more reliably in `let` expressions and function calls
prs: 9917
issues: 6649
---
Previously, if you attempted to evaluate this file with the debugger:
```nix
let
a = builtins.trace "before inner break" (
builtins.break "hello"
);
b = builtins.trace "before outer break" (
builtins.break a
);
in
b
```
Nix would correctly enter the debugger at `builtins.break a`, but if you asked
it to `:continue`, it would skip over the `builtins.break "hello"` expression
entirely.
Now, Nix will correctly enter the debugger at both breakpoints.

@@ -1,7 +0,0 @@
---
synopsis: "`inherit (x) ...` evaluates `x` only once"
prs: 9847
---
`inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.

@@ -1,50 +0,0 @@
---
synopsis: Functions are printed with more detail
prs: 9606
issues: 7145
---
Functions and `builtins` are printed with more detail in `nix repl`, `nix
eval`, `builtins.trace`, and most other places values are printed.
Before:
```
$ nix repl nixpkgs
nix-repl> builtins.map
«primop»
nix-repl> builtins.map lib.id
«primop-app»
nix-repl> builtins.trace lib.id "my-value"
trace: <LAMBDA>
"my-value"
$ nix eval --file functions.nix
{ id = <LAMBDA>; primop = <PRIMOP>; primop-app = <PRIMOP-APP>; }
```
After:
```
$ nix repl nixpkgs
nix-repl> builtins.map
«primop map»
nix-repl> builtins.map lib.id
«partially applied primop map»
nix-repl> builtins.trace lib.id "my-value"
trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5»
"my-value"
$ nix eval --file functions.nix
{ id = «lambda id @ /Users/wiggles/nix/functions.nix:2:8»; primop = «primop map»; primop-app = «partially applied primop map»; }
```
This was actually released in Nix 2.20, but wasn't added to the release notes
so we're announcing it here. The historical release notes have been updated as well.
[type-error]: https://github.com/NixOS/nix/pull/9753
[coercion-error]: https://github.com/NixOS/nix/pull/9754

@@ -1,10 +0,0 @@
---
synopsis: Store paths are allowed to start with `.`
issues: 912
prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224
---
Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).

@@ -1,13 +0,0 @@
---
synopsis: Nix commands respect Ctrl-C
prs: 9687 6995
issues: 7245
---
Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed
while performing various operations (including `nix develop`, `nix flake
update`, and so on). With several fixes to Nix's signal handlers, Nix commands
will now exit quickly after Ctrl-C is pressed.
This was actually released in Nix 2.20, but wasn't added to the release notes
so we're announcing it here. The historical release notes have been updated as well.

@@ -0,0 +1,13 @@
---
synopsis: "`nix eval` prints derivations as `.drv` paths"
prs: 10200
---
`nix eval` will now print derivations as their `.drv` paths, rather than as
attribute sets. This makes commands like `nix eval nixpkgs#bash` terminate
instead of infinitely looping into recursive self-referential attributes:
```ShellSession
$ nix eval nixpkgs#bash
«derivation /nix/store/m32cbgbd598f4w299g0hwyv7gbw6rqcg-bash-5.2p26.drv»
```

@@ -1,24 +0,0 @@
---
synopsis: "`nix repl` pretty-prints values"
prs: 9931
---
`nix repl` will now pretty-print values:
```
{
attrs = {
a = {
b = {
c = { };
};
};
};
list = [ 1 ];
list' = [
1
2
3
];
}
```

@@ -1,37 +0,0 @@
---
synopsis: "Visual clutter in `--debugger` is reduced"
prs: 9919
---
Before:
```
info: breakpoint reached
Starting REPL to allow you to inspect the current state of the evaluator.
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
nix-repl> :continue
error: uh oh
Starting REPL to allow you to inspect the current state of the evaluator.
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
nix-repl>
```
After:
```
info: breakpoint reached
Nix 2.20.0pre20231222_dirty debugger
Type :? for help.
nix-repl> :continue
error: uh oh
nix-repl>
```

@@ -1,8 +0,0 @@
---
synopsis: "`nix repl` now respects Ctrl-C while printing values"
prs: 9927
---
`nix repl` will now halt immediately when Ctrl-C is pressed while it's printing
a value. This is useful if you got curious about what would happen if you
printed all of Nixpkgs.

@@ -1,22 +0,0 @@
---
synopsis: Cycle detection in `nix repl` is simpler and more reliable
prs: 9926
issues: 8672
---
The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere
else values are printed is now simpler and matches the cycle detection in
`nix-instantiate --eval` output.
Before:
```
nix eval --expr 'let self = { inherit self; }; in self'
{ self = { self = «repeated»; }; }
```
After:
```
{ self = «repeated»; }
```

@@ -1,23 +0,0 @@
---
synopsis: "In the debugger, `while evaluating the attribute` errors now include position information"
prs: 9915
---
Before:
```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
0x600001522598
```
After:
```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
131|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
| ^
133| in
```

@@ -1,9 +0,0 @@
---
synopsis: Stack size is increased on macOS
prs: 9860
---
Previously, Nix would set the stack size to 64MiB on Linux, but would leave the
stack size set to the default (approximately 8KiB) on macOS. Now, the stack
size is correctly set to 64MiB on macOS as well, which should reduce stack
overflow segfaults in deeply-recursive Nix expressions.

@@ -121,6 +121,7 @@
- [C++ style guide](contributing/cxx.md)
- [Release Notes](release-notes/index.md)
{{#include ./SUMMARY-rl-next.md}}
+ - [Release 2.21 (2024-03-11)](release-notes/rl-2.21.md)
- [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md)
- [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md)
- [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)

@@ -86,7 +86,7 @@
[store path]: #gloss-store-path
- - [file system object]{#gloss-store-object}
+ - [file system object]{#gloss-file-system-object}
The Nix data model for representing simplified file system data.

@@ -188,9 +188,13 @@ Derivations can declare some infrequently used optional attributes.
}
```
- The `outputHashAlgo` attribute specifies the hash algorithm used to
- compute the hash. It can currently be `"sha1"`, `"sha256"` or
- `"sha512"`.
+ The `outputHash` attribute must be a string containing the hash in either hexadecimal or "nix32" encoding, or following the format for integrity metadata as defined by [SRI](https://www.w3.org/TR/SRI/).
+ The "nix32" encoding is an adaptation of base-32 encoding.
+ The [`convertHash`](@docroot@/language/builtins.md#builtins-convertHash) function shows how to convert between different encodings, and the [`nix-hash` command](../command-ref/nix-hash.md) has information about obtaining the hash for some contents, as well as converting to and from encodings.
+ The `outputHashAlgo` attribute specifies the hash algorithm used to compute the hash.
+ It can currently be `"sha1"`, `"sha256"`, `"sha512"`, or `null`.
+ `outputHashAlgo` can only be `null` when `outputHash` follows the SRI format.
The `outputHashMode` attribute determines how the hash is computed.
It must be one of the following two values:
@@ -209,11 +213,6 @@
this case, the output can be anything, including a directory
tree.
- The `outputHash` attribute, finally, must be a string containing
- the hash in either hexadecimal or base-32 notation. (See the
- [`nix-hash` command](../command-ref/nix-hash.md) for information
- about converting to and from base-32 notation.)
- [`__contentAddressed`]{#adv-attr-__contentAddressed}
> **Warning**
> This attribute is part of an [experimental feature](@docroot@/contributing/experimental-features.md).

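As a minimal sketch of how the manual text above fits together (not part of this commit), the following hypothetical fixed-output derivation pins its output with an SRI-format `outputHash`, so `outputHashAlgo` can be left unset; the builder command and the hash are placeholders.

```nix
# Illustrative only: a fixed-output derivation whose output is pinned by an
# SRI-format outputHash, so outputHashAlgo may be omitted (i.e. null).
# The hash below is a placeholder and would have to be replaced by the real one.
derivation {
  name = "pinned-output-example";
  system = builtins.currentSystem;
  builder = "/bin/sh";
  args = [ "-c" "echo hello > $out" ];
  outputHashMode = "flat";
  outputHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
}
```
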
@@ -432,6 +432,32 @@ This is an incomplete overview of language features, by example.
</td>
</tr>
+ <tr>
+ <td>
+ `inherit pkgs src;`
+ </td>
+ <td>
+ Adds the variables to the current scope (attribute set or `let` binding).
+ Desugars to `pkgs = pkgs; src = src;`
+ </td>
+ </tr>
+ <tr>
+ <td>
+ `inherit (pkgs) lib stdenv;`
+ </td>
+ <td>
+ Adds the attributes, from the attribute set in parentheses, to the current scope (attribute set or `let` binding).
+ Desugars to `lib = pkgs.lib; stdenv = pkgs.stdenv;`
+ </td>
+ </tr>
<tr>
<td>

@@ -34,7 +34,7 @@ For more in-depth information you are kindly referred to subsequent chapters.
lolcat: command not found
```
- 1. Search for more packages on <search.nixos.org> to try them out.
+ 1. Search for more packages on [search.nixos.org](https://search.nixos.org/) to try them out.
1. Free up storage space:

@@ -0,0 +1,302 @@
# Release 2.21.0 (2024-03-11)
- Fix a fixed-output derivation sandbox escape (CVE-2024-27297)
Cooperating Nix derivations could send file descriptors to files in the Nix
store to each other via Unix domain sockets in the abstract namespace. This
allowed one derivation to modify the output of the other derivation, after Nix
has registered the path as "valid" and immutable in the Nix database.
In particular, this allowed the output of fixed-output derivations to be
modified from their expected content.
This isn't the case any more.
- CLI options `--arg-from-file` and `--arg-from-stdin` [#10122](https://github.com/NixOS/nix/pull/10122)
The new CLI option `--arg-from-file` *name* *path* passes the contents
of file *path* as a string value via the function argument *name* to a
Nix expression. Similarly, the new option `--arg-from-stdin` *name*
reads the contents of the string from standard input.
- Concise error printing in `nix repl` [#9928](https://github.com/NixOS/nix/pull/9928)
Previously, if an element of a list or attribute set threw an error while
evaluating, `nix repl` would print the entire error (including source location
information) inline. This output was clumsy and difficult to parse:
```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error:
… while calling the 'throw' builtin
at «string»:1:9:
1| { err = builtins.throw "uh oh!"; }
| ^
error: uh oh!»; }
```
Now, only the error message is displayed, making the output much more readable.
```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error: uh oh!»; }
```
However, if the whole expression being evaluated throws an error, source
locations and (if applicable) a stack trace are printed, just like you'd expect:
```
nix-repl> builtins.throw "uh oh!"
error:
… while calling the 'throw' builtin
at «string»:1:1:
1| builtins.throw "uh oh!"
| ^
error: uh oh!
```
- `--debugger` can now access bindings from `let` expressions [#8827](https://github.com/NixOS/nix/issues/8827) [#9918](https://github.com/NixOS/nix/pull/9918)
Breakpoints and errors in the bindings of a `let` expression can now access
those bindings in the debugger. Previously, only the body of `let` expressions
could access those bindings.
- Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set [#9914](https://github.com/NixOS/nix/pull/9914)
If the `debugger-on-trace` option is set and `--debugger` is given,
`builtins.trace` calls will behave similarly to `builtins.break` and will enter
the debug REPL. This is useful for determining where warnings are being emitted
from.
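As a rough illustration of the workflow this enables (a hypothetical sketch, not taken from the release notes), an expression like the following could be evaluated with `--debugger` and `debugger-on-trace` set to stop at the point where the warning is emitted:
```nix
# Hypothetical example: with debugger-on-trace set, forcing this value under
# --debugger enters the debug REPL at the builtins.trace call below.
let
  warn = msg: value: builtins.trace "warning: ${msg}" value;
in
  warn "legacy option used" { enable = true; }
```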
- Debugger prints source position information [#9913](https://github.com/NixOS/nix/pull/9913)
The `--debugger` now prints source location information, instead of the
pointers of source location information. Before:
```
nix-repl> :bt
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
0x600001522598
```
After:
```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
131|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
| ^
133| in
```
- The `--debugger` will start more reliably in `let` expressions and function calls [#6649](https://github.com/NixOS/nix/issues/6649) [#9917](https://github.com/NixOS/nix/pull/9917)
Previously, if you attempted to evaluate this file with the debugger:
```nix
let
a = builtins.trace "before inner break" (
builtins.break "hello"
);
b = builtins.trace "before outer break" (
builtins.break a
);
in
b
```
Nix would correctly enter the debugger at `builtins.break a`, but if you asked
it to `:continue`, it would skip over the `builtins.break "hello"` expression
entirely.
Now, Nix will correctly enter the debugger at both breakpoints.
- Nested debuggers are no longer supported [#9920](https://github.com/NixOS/nix/pull/9920)
Previously, evaluating an expression that throws an error in the debugger would
enter a second, nested debugger:
```
nix-repl> builtins.throw "what"
error: what
Starting REPL to allow you to inspect the current state of the evaluator.
Welcome to Nix 2.18.1. Type :? for help.
nix-repl>
```
Now, it just prints the error message like `nix repl`:
```
nix-repl> builtins.throw "what"
error:
… while calling the 'throw' builtin
at «string»:1:1:
1| builtins.throw "what"
| ^
error: what
```
- Consistent order of function arguments in printed expressions [#9874](https://github.com/NixOS/nix/pull/9874)
Function arguments are now printed in lexicographic order rather than the internal, creation-time based symbol order.
- Fix duplicate attribute error positions for `inherit` [#9874](https://github.com/NixOS/nix/pull/9874)
When an `inherit` caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
- `inherit (x) ...` evaluates `x` only once [#9847](https://github.com/NixOS/nix/pull/9847)
`inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
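A small, hypothetical expression (not part of the release notes) makes the difference observable: with the new behaviour the trace message below is emitted once, even though two attributes are inherited from the traced set.
```nix
# Illustrative only: builtins.trace fires when the parenthesised set is forced;
# since `inherit (x) a b` now evaluates x once, the message appears a single
# time rather than once per inherited attribute.
let
  result = {
    inherit (builtins.trace "evaluating source set" { a = 1; b = 2; }) a b;
  };
in
  result.a + result.b
```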
- Store paths are allowed to start with `.` [#912](https://github.com/NixOS/nix/issues/912) [#9091](https://github.com/NixOS/nix/pull/9091) [#9095](https://github.com/NixOS/nix/pull/9095) [#9120](https://github.com/NixOS/nix/pull/9120) [#9121](https://github.com/NixOS/nix/pull/9121) [#9122](https://github.com/NixOS/nix/pull/9122) [#9130](https://github.com/NixOS/nix/pull/9130) [#9219](https://github.com/NixOS/nix/pull/9219) [#9224](https://github.com/NixOS/nix/pull/9224) [#9867](https://github.com/NixOS/nix/pull/9867)
Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
From now on, leading periods are supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
- `nix repl` pretty-prints values [#9931](https://github.com/NixOS/nix/pull/9931)
`nix repl` will now pretty-print values:
```
{
attrs = {
a = {
b = {
c = { };
};
};
};
list = [ 1 ];
list' = [
1
2
3
];
}
```
- Introduction of `--regex` and `--all` in `nix profile remove` and `nix profile upgrade` [#10166](https://github.com/NixOS/nix/pull/10166)
Previously the command-line arguments for `nix profile remove` and `nix profile upgrade` matched the package entries using regular expression.
For instance:
```
nix profile remove '.*vim.*'
```
This would remove all packages that contain `vim` in their name.
In most cases, only singular package names were used to remove and upgrade packages. Mixing this with regular expressions sometimes lead to unintended behavior. For instance, `python3.1` could match `python311`.
To avoid unintended behavior, the arguments are now only matching exact names.
Matching using regular expressions is still possible by using the new `--regex` flag:
```
nix profile remove --regex '.*vim.*'
```
One of the most useful cases for using regular expressions was to upgrade all packages. This was previously accomplished by:
```
nix profile upgrade '.*'
```
With the introduction of the `--all` flag, this now becomes more straightforward:
```
nix profile upgrade --all
```
- Visual clutter in `--debugger` is reduced [#9919](https://github.com/NixOS/nix/pull/9919)
Before:
```
info: breakpoint reached
Starting REPL to allow you to inspect the current state of the evaluator.
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
nix-repl> :continue
error: uh oh
Starting REPL to allow you to inspect the current state of the evaluator.
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
nix-repl>
```
After:
```
info: breakpoint reached
Nix 2.20.0pre20231222_dirty debugger
Type :? for help.
nix-repl> :continue
error: uh oh
nix-repl>
```
- Cycle detection in `nix repl` is simpler and more reliable [#8672](https://github.com/NixOS/nix/issues/8672) [#9926](https://github.com/NixOS/nix/pull/9926)
The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere
else values are printed is now simpler and matches the cycle detection in
`nix-instantiate --eval` output.
Before:
```
nix eval --expr 'let self = { inherit self; }; in self'
{ self = { self = «repeated»; }; }
```
After:
```
{ self = «repeated»; }
```
- In the debugger, `while evaluating the attribute` errors now include position information [#9915](https://github.com/NixOS/nix/pull/9915)
Before:
```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
0x600001522598
```
After:
```
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
131|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
| ^
133| in
```
- Stack size is increased on macOS [#9860](https://github.com/NixOS/nix/pull/9860)
Previously, Nix would set the stack size to 64MiB on Linux, but would leave the
stack size set to the default (approximately 8KiB) on macOS. Now, the stack
size is correctly set to 64MiB on macOS as well, which should reduce stack
overflow segfaults in deeply-recursive Nix expressions.

@@ -31,7 +31,6 @@
crossSystems = [
"armv6l-unknown-linux-gnueabihf"
"armv7l-unknown-linux-gnueabihf"
- "x86_64-unknown-freebsd13"
"x86_64-unknown-netbsd"
];
@@ -299,8 +298,11 @@
''
type -p nix-env
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
- time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
- [[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
+ (
+ set -x
+ time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
+ [[ $(sha1sum < packages | cut -c1-40) = e01b031fc9785a572a38be6bc473957e3b6faad7 ]]
+ )
mkdir $out
'';
@@ -341,7 +343,6 @@
checks = forAllSystems (system: {
binaryTarball = self.hydraJobs.binaryTarball.${system};
- perlBindings = self.hydraJobs.perlBindings.${system};
installTests = self.hydraJobs.installTests.${system};
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
rl-next =
@@ -351,6 +352,11 @@
'';
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
dockerImage = self.hydraJobs.dockerImage.${system};
+ } // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
+ # Some perl dependencies are broken on i686-linux.
+ # Since the support is only best-effort there, disable the perl
+ # bindings
+ perlBindings = self.hydraJobs.perlBindings.${system};
});
packages = forAllSystems (system: rec {

@@ -11,6 +11,8 @@ use JSON::PP;
use LWP::UserAgent;
use Net::Amazon::S3;
+ delete $ENV{'shell'}; # shut up a LWP::UserAgent.pm warning
my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
my $releasesBucketName = "nix-releases";
@@ -36,9 +38,9 @@ sub fetch {
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
#print Dumper($evalInfo);
- my $flakeUrl = $evalInfo->{flake} or die;
- my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die);
- my $nixRev = $flakeInfo->{revision} or die;
+ my $flakeUrl = $evalInfo->{flake};
+ my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl;
+ my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die;
my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
#print Dumper($buildInfo);
@@ -83,12 +85,19 @@ my $channelsBucket = $s3_us->bucket($channelsBucketName) or die;
sub getStorePath {
my ($jobName, $output) = @_;
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
- return $buildInfo->{buildoutputs}->{$output or "out"}->{path} or die "cannot get store path for '$jobName'";
+ return $buildInfo->{buildoutputs}->{$output or "out"}->{path} // die "cannot get store path for '$jobName'";
}
sub copyManual {
- my $manual = getStorePath("build.x86_64-linux", "doc");
- print "$manual\n";
+ my $manual;
+ eval {
+ $manual = getStorePath("build.x86_64-linux", "doc");
+ };
+ if ($@) {
+ warn "$@";
+ return;
+ }
+ print "Manual: $manual\n";
my $manualNar = "$tmpDir/$releaseName-manual.nar.xz";
print "$manualNar\n";
@@ -154,19 +163,33 @@ downloadFile("binaryTarball.x86_64-linux", "1");
downloadFile("binaryTarball.aarch64-linux", "1");
downloadFile("binaryTarball.x86_64-darwin", "1");
downloadFile("binaryTarball.aarch64-darwin", "1");
- downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
- downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
+ eval {
+ downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
+ };
+ warn "$@" if $@;
+ eval {
+ downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
+ };
+ warn "$@" if $@;
downloadFile("installerScript", "1");
# Upload docker images to dockerhub.
my $dockerManifest = "";
my $dockerManifestLatest = "";
+ my $haveDocker = 0;
for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
my $system = $platforms->[0];
my $dockerPlatform = $platforms->[1];
my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
- downloadFile("dockerImage.$system", "1", $fn);
+ eval {
+ downloadFile("dockerImage.$system", "1", $fn);
+ };
+ if ($@) {
+ warn "$@" if $@;
+ next;
+ }
+ $haveDocker = 1;
print STDERR "loading docker image for $dockerPlatform...\n";
system("docker load -i $tmpDir/$fn") == 0 or die;
@@ -194,21 +217,23 @@ for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
$dockerManifestLatest .= " --amend $latestTag"
}
+ if ($haveDocker) {
print STDERR "creating multi-platform docker manifest...\n";
system("docker manifest rm nixos/nix:$version");
system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
if ($isLatest) {
print STDERR "creating latest multi-platform docker manifest...\n";
system("docker manifest rm nixos/nix:latest");
system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
}
print STDERR "pushing multi-platform docker manifest...\n";
system("docker manifest push nixos/nix:$version") == 0 or die;
if ($isLatest) {
print STDERR "pushing latest multi-platform docker manifest...\n";
system("docker manifest push nixos/nix:latest") == 0 or die;
}
+ }
# Upload nix-fallback-paths.nix.

@@ -1,5 +1,5 @@
%.gen.hh: %
- @echo 'R"foo(' >> $@.tmp
+ @echo 'R"__NIX_STR(' >> $@.tmp
$(trace-gen) cat $< >> $@.tmp
- @echo ')foo"' >> $@.tmp
+ @echo ')__NIX_STR"' >> $@.tmp
@mv $@.tmp $@

@@ -24,6 +24,7 @@
, libgit2
, libseccomp
, libsodium
+ , man
, lowdown
, mdbook
, mdbook-linkcheck
@@ -213,6 +214,7 @@ in {
git
mercurial
openssh
+ man # for testing `nix-* --help`
] ++ lib.optionals (doInstallCheck || enableManual) [
jq # Also for custom mdBook preprocessor.
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
@@ -341,15 +343,22 @@
# Work around weird bug where it doesn't think there is a Makefile.
installCheckPhase = if (!doBuild && doInstallCheck) then ''
+ runHook preInstallCheck
mkdir -p src/nix-channel
make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
'' else null;
# Needed for tests if we are not doing a build, but testing existing
# built Nix.
- preInstallCheck = lib.optionalString (! doBuild) ''
- mkdir -p src/nix-channel
- '';
+ preInstallCheck =
+ lib.optionalString (! doBuild) ''
+ mkdir -p src/nix-channel
+ ''
+ # See https://github.com/NixOS/nix/issues/2523
+ # Occurs often in tests since https://github.com/NixOS/nix/pull/9900
+ + lib.optionalString stdenv.hostPlatform.isDarwin ''
+ export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
+ '';
separateDebugInfo = !stdenv.hostPlatform.isStatic;

@@ -58,31 +58,6 @@ readonly EXTRACTED_NIX_PATH="$(dirname "$0")"
readonly ROOT_HOME=~root
- readonly PROXY_ENVIRONMENT_VARIABLES=(
- http_proxy
- https_proxy
- ftp_proxy
- no_proxy
- HTTP_PROXY
- HTTPS_PROXY
- FTP_PROXY
- NO_PROXY
- )
- SUDO_EXTRA_ENVIRONMENT_VARIABLES=()
- setup_sudo_extra_environment_variables() {
- local i=${#SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}
- for variable in "${PROXY_ENVIRONMENT_VARIABLES[@]}"; do
- if [ "x${!variable:-}" != "x" ]; then
- SUDO_EXTRA_ENVIRONMENT_VARIABLES[i]="$variable=${!variable}"
- i=$((i + 1))
- fi
- done
- }
- setup_sudo_extra_environment_variables
if [ -t 0 ] && [ -z "${NIX_INSTALLER_YES:-}" ]; then
readonly IS_HEADLESS='no'
else
@@ -386,7 +361,7 @@ _sudo() {
if is_root; then
env "$@"
else
- sudo "${SUDO_EXTRA_ENVIRONMENT_VARIABLES[@]}" "$@"
+ sudo "$@"
fi
}

@@ -20,7 +20,7 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "expr"},
- .handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
+ .handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr(expr)}); }}
});
addFlag({
@@ -28,7 +28,24 @@
.description = "Pass the string *string* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "string"},
- .handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
+ .handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString(s)}); }},
});
+ addFlag({
+ .longName = "arg-from-file",
+ .description = "Pass the contents of file *path* as the argument *name* to Nix functions.",
+ .category = category,
+ .labels = {"name", "path"},
+ .handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile(path)}); }},
+ .completer = completePath
+ });
+ addFlag({
+ .longName = "arg-from-stdin",
+ .description = "Pass the contents of stdin as the argument *name* to Nix functions.",
+ .category = category,
+ .labels = {"name"},
+ .handler = {[&](std::string name) { autoArgs.insert_or_assign(name, AutoArg{AutoArgStdin{}}); }},
+ });
addFlag({
@@ -154,13 +171,23 @@
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
{
auto res = state.buildBindings(autoArgs.size());
- for (auto & i : autoArgs) {
+ for (auto & [name, arg] : autoArgs) {
auto v = state.allocValue();
- if (i.second[0] == 'E')
- state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
- else
- v->mkString(((std::string_view) i.second).substr(1));
- res.insert(state.symbols.create(i.first), v);
+ std::visit(overloaded {
+ [&](const AutoArgExpr & arg) {
+ state.mkThunk_(*v, state.parseExprFromString(arg.expr, state.rootPath(".")));
+ },
+ [&](const AutoArgString & arg) {
+ v->mkString(arg.s);
+ },
+ [&](const AutoArgFile & arg) {
+ v->mkString(readFile(arg.path));
+ },
+ [&](const AutoArgStdin & arg) {
+ v->mkString(readFile(STDIN_FILENO));
+ }
+ }, arg);
+ res.insert(state.symbols.create(name), v);
}
return res.finish();
}

@@ -6,6 +6,8 @@
#include "common-args.hh"
#include "search-path.hh"
+ #include <filesystem>
namespace nix {
class Store;
@@ -26,7 +28,14 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
std::optional<std::string> evalStoreUrl;
private:
- std::map<std::string, std::string> autoArgs;
+ struct AutoArgExpr { std::string expr; };
+ struct AutoArgString { std::string s; };
+ struct AutoArgFile { std::filesystem::path path; };
+ struct AutoArgStdin { };
+ using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile, AutoArgStdin>;
+ std::map<std::string, AutoArg> autoArgs;
};
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);

@@ -21,6 +21,7 @@
#include "url.hh"
#include "registry.hh"
#include "build-result.hh"
+ #include "fs-input-accessor.hh"
#include <regex>
#include <queue>
@@ -146,7 +147,7 @@ MixFlakeOptions::MixFlakeOptions()
.category = category,
.labels = {"flake-lock-path"},
.handler = {[&](std::string lockFilePath) {
- lockFlags.referenceLockFilePath = lockFilePath;
+ lockFlags.referenceLockFilePath = getUnfilteredRootPath(CanonPath(absPath(lockFilePath)));
}},
.completer = completePath
});
@@ -442,10 +443,10 @@ ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake)
{
- auto fingerprint = lockedFlake->getFingerprint();
+ auto fingerprint = lockedFlake->getFingerprint(state.store);
return make_ref<nix::eval_cache::EvalCache>(
evalSettings.useEvalCache && evalSettings.pureEval
- ? std::optional { std::cref(fingerprint) }
+ ? fingerprint
: std::nullopt,
state,
[&state, lockedFlake]()

@@ -123,7 +123,8 @@ struct NixRepl
.force = true,
.derivationPaths = true,
.maxDepth = maxDepth,
- .prettyIndent = 2
+ .prettyIndent = 2,
+ .errors = ErrorPrintBehavior::ThrowTopLevel,
});
}
};
@@ -336,13 +337,7 @@ ReplExitStatus NixRepl::mainLoop()
printMsg(lvlError, e.msg());
}
} catch (EvalError & e) {
- // in debugger mode, an EvalError should trigger another repl session.
- // when that session returns the exception will land here. No need to show it again;
- // show the error for this repl session instead.
- if (state->debugRepl && !state->debugTraces.empty())
- showDebugTrace(std::cout, state->positions, state->debugTraces.front());
- else
- printMsg(lvlError, e.msg());
+ printMsg(lvlError, e.msg());
} catch (Error & e) {
printMsg(lvlError, e.msg());
} catch (Interrupted & e) {
@@ -548,6 +543,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
<< " :l, :load <path> Load Nix expression and add it to scope\n"
<< " :lf, :load-flake <ref> Load Nix flake and add it to scope\n"
<< " :p, :print <expr> Evaluate and print expression recursively\n"
+ << " Strings are printed directly, without escaping.\n"
<< " :q, :quit Exit nix-repl\n"
<< " :r, :reload Reload all files\n"
<< " :sh <expr> Build dependencies of derivation, then start\n"
@@ -755,7 +751,11 @@
else if (command == ":p" || command == ":print") {
Value v;
evalString(arg, v);
- printValue(std::cout, v);
+ if (v.type() == nString) {
+ std::cout << v.string_view();
+ } else {
+ printValue(std::cout, v);
+ }
std::cout << std::endl;
}

@@ -21,11 +21,24 @@ struct EvalSettings : Config
Setting<Strings> nixPath{
this, getDefaultNixPath(), "nix-path",
R"(
- List of directories to be searched for `<...>` file references
- In particular, outside of [pure evaluation mode](#conf-pure-eval), this determines the value of
- [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
- )"};
+ List of search paths to use for [lookup path](@docroot@/language/constructs/lookup-path.md) resolution.
+ This setting determines the value of
+ [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath) and can be used with [`builtins.findFile`](@docroot@/language/builtin-constants.md#builtins-findFile).
+ The default value is
+ ```
+ $HOME/.nix-defexpr/channels
+ nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs
+ $NIX_STATE_DIR/profiles/per-user/root/channels
+ ```
+ It can be overridden with the [`NIX_PATH` environment variable](@docroot@/command-ref/env-common.md#env-NIX_PATH) or the [`-I` command line option](@docroot@/command-ref/opt-common.md#opt-I).
+ > **Note**
+ >
+ > If [pure evaluation](#conf-pure-eval) is enabled, `nixPath` evaluates to the empty list `[ ]`.
+ )", {}, false};
Setting<std::string> currentSystem{
this, "", "eval-system",
@@ -55,8 +68,6 @@ struct EvalSettings : Config
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
or to URIs outside of
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
- Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
Setting<bool> pureEval{this, false, "pure-eval",

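To connect the `nix-path` documentation in the hunk above with the builtins it feeds, here is a small, hypothetical expression (not part of this diff) that resolves a lookup-path name against the configured search path:

```nix
# Illustrative only: builtins.nixPath reflects the nix-path setting (it is the
# empty list under pure evaluation), and builtins.findFile resolves a
# lookup-path name such as <nixpkgs> against it.
builtins.findFile builtins.nixPath "nixpkgs"
```
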
@@ -762,10 +762,24 @@ std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const Stati
return vm;
}
+ /**
+ * Sets `inDebugger` to true on construction and false on destruction.
+ */
+ class DebuggerGuard {
+ bool & inDebugger;
+ public:
+ DebuggerGuard(bool & inDebugger) : inDebugger(inDebugger) {
+ inDebugger = true;
+ }
+ ~DebuggerGuard() {
+ inDebugger = false;
+ }
+ };
void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & expr)
{
- // double check we've got the debugRepl function pointer.
- if (!debugRepl)
+ // Make sure we have a debugger to run and we're not already in a debugger.
+ if (!debugRepl || inDebugger)
return;
auto dts =
@@ -792,6 +806,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
auto se = getStaticEnv(expr);
if (se) {
auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
+ DebuggerGuard _guard(inDebugger);
auto exitStatus = (debugRepl)(ref<EvalState>(shared_from_this()), *vm);
switch (exitStatus) {
case ReplExitStatus::QuitAll:
@@ -934,12 +949,11 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
void EvalState::mkPos(Value & v, PosIdx p)
{
- auto pos = positions[p];
- if (auto path = std::get_if<SourcePath>(&pos.origin)) {
+ auto origin = positions.originOf(p);
+ if (auto path = std::get_if<SourcePath>(&origin)) {
auto attrs = buildBindings(3);
attrs.alloc(sFile).mkString(path->path.abs());
- attrs.alloc(sLine).mkInt(pos.line);
- attrs.alloc(sColumn).mkInt(pos.column);
+ makePositionThunks(*this, p, attrs.alloc(sLine), attrs.alloc(sColumn));
v.mkAttrs(attrs);
} else
v.mkNull();
@@ -2762,9 +2776,12 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<Sta
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
{
- auto s = make_ref<std::string>(std::move(s_));
- s->append("\0\0", 2);
- return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
+ // NOTE this method (and parseStdin) must take care to *fully copy* their input
+ // into their respective Pos::Origin until the parser stops overwriting its input
+ // data.
+ auto s = make_ref<std::string>(s_);
+ s_.append("\0\0", 2);
+ return parse(s_.data(), s_.size(), Pos::String{.source = s}, basePath, staticEnv);
}
@@ -2776,12 +2793,15 @@ Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath
Expr * EvalState::parseStdin()
{
+ // NOTE this method (and parseExprFromString) must take care to *fully copy* their
+ // input into their respective Pos::Origin until the parser stops overwriting its
+ // input data.
//Activity act(*logger, lvlTalkative, "parsing standard input");
auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators
buffer.append("\0\0", 2);
- auto s = make_ref<std::string>(std::move(buffer));
- return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
+ auto s = make_ref<std::string>(buffer);
+ return parse(buffer.data(), buffer.size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
}

@@ -153,6 +153,7 @@ struct DebugTrace {
bool isError;
};
class EvalState : public std::enable_shared_from_this<EvalState>
{
public:
@@ -222,6 +223,7 @@
*/
ReplExitStatus (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
bool debugStop;
+ bool inDebugger = false;
int trylevel;
std::list<DebugTrace> debugTraces;
std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;

@@ -139,7 +139,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
break;
case nInt:
- attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
+ attrs.emplace(state.symbols[attr.name], (long unsigned int) attr.value->integer);
break;
default:
if (attr.name == state.symbols.create("publicKeys")) {
@@ -202,43 +202,27 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
return inputs;
}
- static Flake getFlake(
+ static Flake readFlake(
EvalState & state,
const FlakeRef & originalRef,
- bool allowLookup,
- FlakeCache & flakeCache,
- InputPath lockRootPath)
+ const FlakeRef & resolvedRef,
+ const FlakeRef & lockedRef,
+ const SourcePath & rootDir,
+ const InputPath & lockRootPath)
{
- auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
- state, originalRef, allowLookup, flakeCache);
- // We need to guard against symlink attacks, but before we start doing
- // filesystem operations we should make sure there's a flake.nix in the
- // first place.
- auto unsafeFlakeDir = state.store->toRealPath(storePath) + "/" + lockedRef.subdir;
- auto unsafeFlakeFile = unsafeFlakeDir + "/flake.nix";
- if (!pathExists(unsafeFlakeFile))
- throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
- // Guard against symlink attacks.
- auto flakeDir = canonPath(unsafeFlakeDir, true);
- auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
- if (!isInDir(flakeFile, state.store->toRealPath(storePath)))
- throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
- lockedRef, state.store->printStorePath(storePath));
+ auto flakePath = rootDir / CanonPath(resolvedRef.subdir) / "flake.nix";
+ // NOTE evalFile forces vInfo to be an attrset because mustBeTrivial is true.
+ Value vInfo;
+ state.evalFile(flakePath, vInfo, true);
Flake flake {
.originalRef = originalRef,
.resolvedRef = resolvedRef,
.lockedRef = lockedRef,
- .storePath = storePath,
+ .path = flakePath,
};
- Value vInfo;
- state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack
- expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1));
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos);
flake.description = description->value->c_str();
@@ -247,7 +231,7 @@ static Flake getFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs->get(sInputs))
- flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakeDir, lockRootPath);
+ flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakePath.parent().path.abs(), lockRootPath); // FIXME
auto sOutputs = state.symbols.create("outputs");
@@ -264,7 +248,7 @@
}
} else
- throw Error("flake '%s' lacks attribute 'outputs'", lockedRef);
+ throw Error("flake '%s' lacks attribute 'outputs'", resolvedRef);
auto sNixConfig = state.symbols.create("nixConfig");
@@ -281,7 +265,7 @@
NixStringContext emptyContext = {};
flake.config.settings.emplace(
state.symbols[setting.name],
- state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
+ state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true).toOwned());
}
else if (setting.value->type() == nInt)
flake.config.settings.emplace(
@@ -313,12 +297,25 @@
attr.name != sOutputs &&
attr.name != sNixConfig)
throw Error("flake '%s' has an unsupported attribute '%s', at %s",
lockedRef, state.symbols[attr.name], state.positions[attr.pos]); resolvedRef, state.symbols[attr.name], state.positions[attr.pos]);
} }
return flake; return flake;
} }
static Flake getFlake(
EvalState & state,
const FlakeRef & originalRef,
bool allowLookup,
FlakeCache & flakeCache,
InputPath lockRootPath)
{
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, allowLookup, flakeCache);
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
}
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache) Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
{ {
return getFlake(state, originalRef, allowLookup, flakeCache, {}); return getFlake(state, originalRef, allowLookup, flakeCache, {});
@ -330,6 +327,13 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
return getFlake(state, originalRef, allowLookup, flakeCache); return getFlake(state, originalRef, allowLookup, flakeCache);
} }
static LockFile readLockFile(const SourcePath & lockFilePath)
{
return lockFilePath.pathExists()
? LockFile(lockFilePath.readFile(), fmt("%s", lockFilePath))
: LockFile();
}
/* Compute an in-memory lock file for the specified top-level flake, /* Compute an in-memory lock file for the specified top-level flake,
and optionally write it to file, if the flake is writable. */ and optionally write it to file, if the flake is writable. */
LockedFlake lockFlake( LockedFlake lockFlake(
@ -355,17 +359,16 @@ LockedFlake lockFlake(
throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false"); throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false");
} }
// FIXME: symlink attack auto oldLockFile = readLockFile(
auto oldLockFile = LockFile::read(
lockFlags.referenceLockFilePath.value_or( lockFlags.referenceLockFilePath.value_or(
state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock")); flake.lockFilePath()));
debug("old lock file: %s", oldLockFile); debug("old lock file: %s", oldLockFile);
std::map<InputPath, FlakeInput> overrides; std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> explicitCliOverrides; std::set<InputPath> explicitCliOverrides;
std::set<InputPath> overridesUsed, updatesUsed; std::set<InputPath> overridesUsed, updatesUsed;
std::map<ref<Node>, StorePath> nodePaths; std::map<ref<Node>, SourcePath> nodePaths;
for (auto & i : lockFlags.inputOverrides) { for (auto & i : lockFlags.inputOverrides) {
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second }); overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
@ -538,7 +541,7 @@ LockedFlake lockFlake(
if (mustRefetch) { if (mustRefetch) {
auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath); auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
nodePaths.emplace(childNode, inputFlake.storePath); nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false); computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
} else { } else {
computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true); computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
@ -587,13 +590,12 @@ LockedFlake lockFlake(
flake. Also, unless we already have this flake flake. Also, unless we already have this flake
in the top-level lock file, use this flake's in the top-level lock file, use this flake's
own lock file. */ own lock file. */
nodePaths.emplace(childNode, inputFlake.storePath); nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks( computeLocks(
inputFlake.inputs, childNode, inputPath, inputFlake.inputs, childNode, inputPath,
oldLock oldLock
? std::dynamic_pointer_cast<const Node>(oldLock) ? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read( : readLockFile(inputFlake.lockFilePath()).root.get_ptr(),
state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
oldLock ? lockRootPath : inputPath, oldLock ? lockRootPath : inputPath,
localPath, localPath,
false); false);
@ -605,7 +607,7 @@ LockedFlake lockFlake(
auto childNode = make_ref<LockedNode>(lockedRef, ref, false); auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
nodePaths.emplace(childNode, storePath); nodePaths.emplace(childNode, state.rootPath(state.store->toRealPath(storePath)));
node->inputs.insert_or_assign(id, childNode); node->inputs.insert_or_assign(id, childNode);
} }
@ -619,9 +621,9 @@ LockedFlake lockFlake(
}; };
// Bring in the current ref for relative path resolution if we have it // Bring in the current ref for relative path resolution if we have it
auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true); auto parentPath = flake.path.parent().path.abs();
nodePaths.emplace(newLockFile.root, flake.storePath); nodePaths.emplace(newLockFile.root, flake.path.parent());
computeLocks( computeLocks(
flake.inputs, flake.inputs,
@ -746,13 +748,15 @@ void callFlake(EvalState & state,
auto overrides = state.buildBindings(lockedFlake.nodePaths.size()); auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
for (auto & [node, storePath] : lockedFlake.nodePaths) { for (auto & [node, sourcePath] : lockedFlake.nodePaths) {
auto override = state.buildBindings(2); auto override = state.buildBindings(2);
auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo")); auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
auto lockedNode = node.dynamic_pointer_cast<const LockedNode>(); auto lockedNode = node.dynamic_pointer_cast<const LockedNode>();
auto [storePath, subdir] = state.store->toStorePath(sourcePath.path.abs());
emitTreeAttrs( emitTreeAttrs(
state, state,
storePath, storePath,
@ -766,7 +770,7 @@ void callFlake(EvalState & state,
override override
.alloc(state.symbols.create("dir")) .alloc(state.symbols.create("dir"))
.mkString(lockedNode ? lockedNode->lockedRef.subdir : lockedFlake.flake.lockedRef.subdir); .mkString(CanonPath(subdir).rel());
overrides.alloc(state.symbols.create(key->second)).mkAttrs(override); overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
} }
@ -921,18 +925,17 @@ static RegisterPrimOp r4({
} }
Fingerprint LockedFlake::getFingerprint() const std::optional<Fingerprint> LockedFlake::getFingerprint(ref<Store> store) const
{ {
if (lockFile.isUnlocked()) return std::nullopt;
auto fingerprint = flake.lockedRef.input.getFingerprint(store);
if (!fingerprint) return std::nullopt;
// FIXME: as an optimization, if the flake contains a lock file // FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use // and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint. // flake.sourceInfo.storePath for the fingerprint.
return hashString(HashAlgorithm::SHA256, return hashString(HashAlgorithm::SHA256, fmt("%s;%s;%s", *fingerprint, flake.lockedRef.subdir, lockFile));
fmt("%s;%s;%d;%d;%s",
flake.storePath.to_string(),
flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),
lockFile));
} }
Flake::~Flake() { } Flake::~Flake() { }

View file

@ -77,18 +77,27 @@ struct Flake
* the specific local store result of invoking the fetcher * the specific local store result of invoking the fetcher
*/ */
FlakeRef lockedRef; FlakeRef lockedRef;
/**
* The path of `flake.nix`.
*/
SourcePath path;
/** /**
* pretend that 'lockedRef' is dirty * pretend that 'lockedRef' is dirty
*/ */
bool forceDirty = false; bool forceDirty = false;
std::optional<std::string> description; std::optional<std::string> description;
StorePath storePath;
FlakeInputs inputs; FlakeInputs inputs;
/** /**
* 'nixConfig' attribute * 'nixConfig' attribute
*/ */
ConfigFile config; ConfigFile config;
~Flake(); ~Flake();
SourcePath lockFilePath()
{
return path.parent() / "flake.lock";
}
}; };
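Schematically (placeholder store hash, and an empty subdir component for top-level flakes): for a flake fetched into the store, `path` is `/nix/store/<hash>-source/<subdir>/flake.nix` and `lockFilePath()` yields `/nix/store/<hash>-source/<subdir>/flake.lock`.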
Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup); Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
@ -104,13 +113,13 @@ struct LockedFlake
LockFile lockFile; LockFile lockFile;
/** /**
* Store paths of nodes that have been fetched in * Source tree accessors for nodes that have been fetched in
* lockFlake(); in particular, the root node and the overridden * lockFlake(); in particular, the root node and the overridden
* inputs. * inputs.
*/ */
std::map<ref<Node>, StorePath> nodePaths; std::map<ref<Node>, SourcePath> nodePaths;
Fingerprint getFingerprint() const; std::optional<Fingerprint> getFingerprint(ref<Store> store) const;
}; };
struct LockFlags struct LockFlags
@ -165,7 +174,7 @@ struct LockFlags
/** /**
* The path to a lock file to read instead of the `flake.lock` file in the top-level flake * The path to a lock file to read instead of the `flake.lock` file in the top-level flake
*/ */
std::optional<std::string> referenceLockFilePath; std::optional<SourcePath> referenceLockFilePath;
/** /**
* The path to a lock file to write to instead of the `flake.lock` file in the top-level flake * The path to a lock file to write to instead of the `flake.lock` file in the top-level flake

View file

@ -102,6 +102,19 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
if (isFlake) { if (isFlake) {
if (!S_ISDIR(lstat(path).st_mode)) {
if (baseNameOf(path) == "flake.nix") {
// Be gentle with people who accidentally write `/foo/bar/flake.nix` instead of `/foo/bar`
warn(
"Path '%s' should point at the directory containing the 'flake.nix' file, not the file itself. "
"Pretending that you meant '%s'"
, path, dirOf(path));
path = dirOf(path);
} else {
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
}
}
if (!allowMissing && !pathExists(path + "/flake.nix")){ if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path); notice("path '%s' does not contain a 'flake.nix', searching up",path);
@ -124,9 +137,6 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
throw BadURL("could not find a flake.nix file"); throw BadURL("could not find a flake.nix file");
} }
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
if (!allowMissing && !pathExists(path + "/flake.nix")) if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path); throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
@ -274,7 +284,7 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
std::pair<StorePath, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const std::pair<StorePath, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{ {
auto [storePath, lockedInput] = input.fetch(store); auto [storePath, lockedInput] = input.fetchToStore(store);
return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)}; return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)};
} }

View file

@ -84,8 +84,10 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
return doFind(root, path, visited); return doFind(root, path, visited);
} }
LockFile::LockFile(const nlohmann::json & json, const Path & path) LockFile::LockFile(std::string_view contents, std::string_view path)
{ {
auto json = nlohmann::json::parse(contents);
auto version = json.value("version", 0); auto version = json.value("version", 0);
if (version < 5 || version > 7) if (version < 5 || version > 7)
throw Error("lock file '%s' has unsupported version %d", path, version); throw Error("lock file '%s' has unsupported version %d", path, version);
@ -203,12 +205,6 @@ std::pair<std::string, LockFile::KeyMap> LockFile::to_string() const
return {json.dump(2), std::move(nodeKeys)}; return {json.dump(2), std::move(nodeKeys)};
} }
LockFile LockFile::read(const Path & path)
{
if (!pathExists(path)) return LockFile();
return LockFile(nlohmann::json::parse(readFile(path)), path);
}
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile) std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
{ {
stream << lockFile.toJSON().first.dump(2); stream << lockFile.toJSON().first.dump(2);

View file

@ -55,7 +55,7 @@ struct LockFile
ref<Node> root = make_ref<Node>(); ref<Node> root = make_ref<Node>();
LockFile() {}; LockFile() {};
LockFile(const nlohmann::json & json, const Path & path); LockFile(std::string_view contents, std::string_view path);
typedef std::map<ref<const Node>, std::string> KeyMap; typedef std::map<ref<const Node>, std::string> KeyMap;
@ -63,8 +63,6 @@ struct LockFile
std::pair<std::string, KeyMap> to_string() const; std::pair<std::string, KeyMap> to_string() const;
static LockFile read(const Path & path);
/** /**
* Check whether this lock file has any unlocked inputs. If so, * Check whether this lock file has any unlocked inputs. If so,
* return one. * return one.

View file

@ -33,33 +33,16 @@ namespace nix {
static void initLoc(YYLTYPE * loc) static void initLoc(YYLTYPE * loc)
{ {
loc->first_line = loc->last_line = 1; loc->first_line = loc->last_line = 0;
loc->first_column = loc->last_column = 1; loc->first_column = loc->last_column = 0;
} }
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len) static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{ {
loc->stash(); loc->stash();
loc->first_line = loc->last_line;
loc->first_column = loc->last_column; loc->first_column = loc->last_column;
loc->last_column += len;
for (size_t i = 0; i < len; i++) {
switch (*s++) {
case '\r':
if (*s == '\n') { /* cr/lf */
i++;
s++;
}
/* fall through */
case '\n':
++loc->last_line;
loc->last_column = 1;
break;
default:
++loc->last_column;
}
}
} }

View file

@ -149,7 +149,10 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const
if (hasFormals()) { if (hasFormals()) {
str << "{ "; str << "{ ";
bool first = true; bool first = true;
for (auto & i : formals->formals) { // the natural Symbol ordering is by creation time, which can lead to the
// same expression being printed in two different ways depending on its
// context. always use lexicographic ordering to avoid this.
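// e.g. a lambda written `{ b, a }: a` is now always shown with its formals as `{ a, b }`, regardless of symbol creation order.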
for (auto & i : formals->lexicographicOrder(symbols)) {
if (first) first = false; else str << ", "; if (first) first = false; else str << ", ";
str << symbols[i.name]; str << symbols[i.name];
if (i.def) { if (i.def) {
@ -580,6 +583,39 @@ std::string ExprLambda::showNamePos(const EvalState & state) const
/* Position table. */
Pos PosTable::operator[](PosIdx p) const
{
auto origin = resolve(p);
if (!origin)
return {};
const auto offset = origin->offsetOf(p);
Pos result{0, 0, origin->origin};
auto lines = this->lines.lock();
auto linesForInput = (*lines)[origin->offset];
if (linesForInput.empty()) {
auto source = result.getSource().value_or("");
const char * begin = source.data();
for (Pos::LinesIterator it(source), end; it != end; it++)
linesForInput.push_back(it->data() - begin);
if (linesForInput.empty())
linesForInput.push_back(0);
}
// as above: the first line starts at byte 0 and is always present
auto lineStartOffset = std::prev(
std::upper_bound(linesForInput.begin(), linesForInput.end(), offset));
result.line = 1 + (lineStartOffset - linesForInput.begin());
result.column = 1 + (offset - *lineStartOffset);
return result;
}
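A self-contained sketch of the lookup performed above on a made-up three-line input (this simplified version ignores the `\r\n` handling done by `Pos::LinesIterator`):

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <string>
#include <vector>

int main()
{
    std::string source = "ab\ncd\n e";
    std::vector<std::uint32_t> lineStarts{0};   // the first line always starts at byte 0
    for (std::uint32_t i = 0; i < source.size(); ++i)
        if (source[i] == '\n')
            lineStarts.push_back(i + 1);        // here: [0, 3, 6]

    std::uint32_t offset = 4;                   // byte offset of the 'd'
    auto lineStart = std::prev(
        std::upper_bound(lineStarts.begin(), lineStarts.end(), offset));
    assert(1 + (lineStart - lineStarts.begin()) == 2); // line 2
    assert(1 + (offset - *lineStart) == 2);             // column 2
}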
/* Symbol table. */ /* Symbol table. */
size_t SymbolTable::totalSize() const size_t SymbolTable::totalSize() const

View file

@ -7,7 +7,6 @@
#include "value.hh" #include "value.hh"
#include "symbol-table.hh" #include "symbol-table.hh"
#include "error.hh" #include "error.hh"
#include "chunked-vector.hh"
#include "position.hh" #include "position.hh"
#include "eval-error.hh" #include "eval-error.hh"
#include "pos-idx.hh" #include "pos-idx.hh"

View file

@ -24,20 +24,15 @@ struct ParserLocation
int last_line, last_column; int last_line, last_column;
// backup to recover from yyless(0) // backup to recover from yyless(0)
int stashed_first_line, stashed_first_column; int stashed_first_column, stashed_last_column;
int stashed_last_line, stashed_last_column;
void stash() { void stash() {
stashed_first_line = first_line;
stashed_first_column = first_column; stashed_first_column = first_column;
stashed_last_line = last_line;
stashed_last_column = last_column; stashed_last_column = last_column;
} }
void unstash() { void unstash() {
first_line = stashed_first_line;
first_column = stashed_first_column; first_column = stashed_first_column;
last_line = stashed_last_line;
last_column = stashed_last_column; last_column = stashed_last_column;
} }
}; };
@ -276,7 +271,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
inline PosIdx ParserState::at(const ParserLocation & loc) inline PosIdx ParserState::at(const ParserLocation & loc)
{ {
return positions.add(origin, loc.first_line, loc.first_column); return positions.add(origin, loc.first_column);
} }
} }

View file

@ -64,6 +64,10 @@ using namespace nix;
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
{ {
if (std::string_view(error).starts_with("syntax error, unexpected end of file")) {
loc->first_column = loc->last_column;
loc->first_line = loc->last_line;
}
throw ParseError({ throw ParseError({
.msg = HintFmt(error), .msg = HintFmt(error),
.pos = state->positions[state->at(*loc)] .pos = state->positions[state->at(*loc)]
@ -87,6 +91,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
nix::StringToken uri; nix::StringToken uri;
nix::StringToken str; nix::StringToken str;
std::vector<nix::AttrName> * attrNames; std::vector<nix::AttrName> * attrNames;
std::vector<std::pair<nix::AttrName, nix::PosIdx>> * inheritAttrs;
std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts; std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts; std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts;
} }
@ -97,7 +102,8 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
%type <attrs> binds %type <attrs> binds
%type <formals> formals %type <formals> formals
%type <formal> formal %type <formal> formal
%type <attrNames> attrs attrpath %type <attrNames> attrpath
%type <inheritAttrs> attrs
%type <string_parts> string_parts_interpolated %type <string_parts> string_parts_interpolated
%type <ind_string_parts> ind_string_parts %type <ind_string_parts> ind_string_parts
%type <e> path_start string_parts string_attr %type <e> path_start string_parts string_attr
@ -309,13 +315,12 @@ binds
: binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; } : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
| binds INHERIT attrs ';' | binds INHERIT attrs ';'
{ $$ = $1; { $$ = $1;
for (auto & i : *$3) { for (auto & [i, iPos] : *$3) {
if ($$->attrs.find(i.symbol) != $$->attrs.end()) if ($$->attrs.find(i.symbol) != $$->attrs.end())
state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos); state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
auto pos = state->at(@3);
$$->attrs.emplace( $$->attrs.emplace(
i.symbol, i.symbol,
ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, ExprAttrs::AttrDef::Kind::Inherited)); ExprAttrs::AttrDef(new ExprVar(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited));
} }
delete $3; delete $3;
} }
@ -325,14 +330,14 @@ binds
$$->inheritFromExprs = std::make_unique<std::vector<Expr *>>(); $$->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
$$->inheritFromExprs->push_back($4); $$->inheritFromExprs->push_back($4);
auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1); auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
for (auto & i : *$6) { for (auto & [i, iPos] : *$6) {
if ($$->attrs.find(i.symbol) != $$->attrs.end()) if ($$->attrs.find(i.symbol) != $$->attrs.end())
state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos); state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
$$->attrs.emplace( $$->attrs.emplace(
i.symbol, i.symbol,
ExprAttrs::AttrDef( ExprAttrs::AttrDef(
new ExprSelect(CUR_POS, from, i.symbol), new ExprSelect(iPos, from, i.symbol),
state->at(@6), iPos,
ExprAttrs::AttrDef::Kind::InheritedFrom)); ExprAttrs::AttrDef::Kind::InheritedFrom));
} }
delete $6; delete $6;
@ -341,12 +346,12 @@ binds
; ;
attrs attrs
: attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); } : attrs attr { $$ = $1; $1->emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); }
| attrs string_attr | attrs string_attr
{ $$ = $1; { $$ = $1;
ExprString * str = dynamic_cast<ExprString *>($2); ExprString * str = dynamic_cast<ExprString *>($2);
if (str) { if (str) {
$$->push_back(AttrName(state->symbols.create(str->s))); $$->emplace_back(AttrName(state->symbols.create(str->s)), state->at(@2));
delete str; delete str;
} else } else
throw ParseError({ throw ParseError({
@ -354,7 +359,7 @@ attrs
.pos = state->positions[state->at(@2)] .pos = state->positions[state->at(@2)]
}); });
} }
| { $$ = new AttrPath; } | { $$ = new std::vector<std::pair<AttrName, PosIdx>>; }
; ;
attrpath attrpath
@ -433,7 +438,7 @@ Expr * parseExprFromBuf(
.symbols = symbols, .symbols = symbols,
.positions = positions, .positions = positions,
.basePath = basePath, .basePath = basePath,
.origin = {origin}, .origin = positions.addOrigin(origin, length),
.rootFS = rootFS, .rootFS = rootFS,
.s = astSymbols, .s = astSymbols,
}; };

View file

@ -6,6 +6,7 @@ namespace nix {
class PosIdx class PosIdx
{ {
friend struct LazyPosAcessors;
friend class PosTable; friend class PosTable;
private: private:

View file

@ -7,6 +7,7 @@
#include "chunked-vector.hh" #include "chunked-vector.hh"
#include "pos-idx.hh" #include "pos-idx.hh"
#include "position.hh" #include "position.hh"
#include "sync.hh"
namespace nix { namespace nix {
@ -17,66 +18,69 @@ public:
{ {
friend PosTable; friend PosTable;
private: private:
// must always be invalid by default, add() replaces this with the actual value. uint32_t offset;
// subsequent add() calls use this index as a token to quickly check whether the
// current origins.back() can be reused or not.
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
// Used for searching in PosTable::[]. Origin(Pos::Origin origin, uint32_t offset, size_t size):
explicit Origin(uint32_t idx) offset(offset), origin(origin), size(size)
: idx(idx) {}
, origin{std::monostate()}
{
}
public: public:
const Pos::Origin origin; const Pos::Origin origin;
const size_t size;
Origin(Pos::Origin origin) uint32_t offsetOf(PosIdx p) const
: origin(origin)
{ {
return p.id - 1 - offset;
} }
}; };
struct Offset
{
uint32_t line, column;
};
private: private:
std::vector<Origin> origins; using Lines = std::vector<uint32_t>;
ChunkedVector<Offset, 8192> offsets;
public: std::map<uint32_t, Origin> origins;
PosTable() mutable Sync<std::map<uint32_t, Lines>> lines;
: offsets(1024)
{
origins.reserve(1024);
}
PosIdx add(const Origin & origin, uint32_t line, uint32_t column) const Origin * resolve(PosIdx p) const
{ {
const auto idx = offsets.add({line, column}).second; if (p.id == 0)
if (origins.empty() || origins.back().idx != origin.idx) { return nullptr;
origin.idx = idx;
origins.push_back(origin);
}
return PosIdx(idx + 1);
}
Pos operator[](PosIdx p) const
{
if (p.id == 0 || p.id > offsets.size())
return {};
const auto idx = p.id - 1; const auto idx = p.id - 1;
/* we want the last key <= idx, so we'll take prev(first key > idx). /* we want the last key <= idx, so we'll take prev(first key > idx).
this is guaranteed to never rewind origin.begin because the first this is guaranteed to never rewind origin.begin because the first
key is always 0. */ key is always 0. */
const auto pastOrigin = std::upper_bound( const auto pastOrigin = origins.upper_bound(idx);
origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; }); return &std::prev(pastOrigin)->second;
const auto origin = *std::prev(pastOrigin); }
const auto offset = offsets[idx];
return {offset.line, offset.column, origin.origin}; public:
Origin addOrigin(Pos::Origin origin, size_t size)
{
uint32_t offset = 0;
if (auto it = origins.rbegin(); it != origins.rend())
offset = it->first + it->second.size;
// +1 because all PosIdx are offset by 1 to begin with, and
// another +1 to ensure that all origins can point to EOF, eg
// on (invalid) empty inputs.
if (2 + offset + size < offset)
return Origin{origin, offset, 0};
return origins.emplace(offset, Origin{origin, offset, size}).first->second;
}
PosIdx add(const Origin & origin, size_t offset)
{
if (offset > origin.size)
return PosIdx();
return PosIdx(1 + origin.offset + offset);
}
Pos operator[](PosIdx p) const;
Pos::Origin originOf(PosIdx p) const
{
if (auto o = resolve(p))
return o->origin;
return std::monostate{};
} }
}; };
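Put differently, a PosIdx now encodes 1 + a global byte offset, and each origin owns a contiguous slice of that offset space. A worked example with made-up sizes: if the first origin registered is 100 bytes long it is placed at offset 0, the next origin then starts at offset 100, and a position at byte 7 of that second input gets PosIdx 1 + 100 + 7 = 108; resolving 108 finds the origin at offset 100 (the last key not greater than 107) and offsetOf() recovers 108 - 1 - 100 = 7.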

View file

@ -1736,7 +1736,7 @@ static RegisterPrimOp primop_findFile(PrimOp {
- If the suffix is found inside that directory, then the entry is a match. - If the suffix is found inside that directory, then the entry is a match.
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned. The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath): [Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
```nix ```nix
<nixpkgs> <nixpkgs>
@ -2524,6 +2524,54 @@ static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
.fun = prim_unsafeGetAttrPos, .fun = prim_unsafeGetAttrPos,
}); });
// access to exact position information (ie, line and column numbers) is deferred
// due to the cost associated with calculating that information and how rarely
// it is used in practice. this is achieved by creating thunks to otherwise
// inaccessible primops that are not exposed as __op or under builtins to turn
// the internal PosIdx back into a line and column number, respectively. exposing
// these primops in any way would at best be not useful and at worst create wildly
// indeterministic eval results depending on parse order of files.
//
// in a simpler world this would instead be implemented as another kind of thunk,
// but each type of thunk has an associated runtime cost in the current evaluator.
// as with black holes this cost is too high to justify another thunk type to check
// for in the very hot path that is forceValue.
static struct LazyPosAcessors {
PrimOp primop_lineOfPos{
.arity = 1,
.fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
v.mkInt(state.positions[PosIdx(args[0]->integer)].line);
}
};
PrimOp primop_columnOfPos{
.arity = 1,
.fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
v.mkInt(state.positions[PosIdx(args[0]->integer)].column);
}
};
Value lineOfPos, columnOfPos;
LazyPosAcessors()
{
lineOfPos.mkPrimOp(&primop_lineOfPos);
columnOfPos.mkPrimOp(&primop_columnOfPos);
}
void operator()(EvalState & state, const PosIdx pos, Value & line, Value & column)
{
Value * posV = state.allocValue();
posV->mkInt(pos.id);
line.mkApp(&lineOfPos, posV);
column.mkApp(&columnOfPos, posV);
}
} makeLazyPosAccessors;
void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column)
{
makeLazyPosAccessors(state, pos, line, column);
}
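A rough sketch of how a caller can hand these thunks out (not taken from the sources, but built only from calls that appear elsewhere in this change, such as `buildBindings`, `alloc` and `mkAttrs`): nothing touches the position table until one of the two values is actually forced.

// Illustrative only: a primop result carrying lazily computed line/column attrs.
static void mkLazyPosAttrs(EvalState & state, const PosIdx pos, Value & v)
{
    auto attrs = state.buildBindings(2);
    makePositionThunks(state, pos,
        attrs.alloc(state.symbols.create("line")),
        attrs.alloc(state.symbols.create("column")));
    v.mkAttrs(attrs); // neither thunk has been forced; PosTable::operator[] has not run
}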
/* Dynamic version of the `?' operator. */ /* Dynamic version of the `?' operator. */
static void prim_hasAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v) static void prim_hasAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{ {
@ -4522,11 +4570,9 @@ void EvalState::createBaseEnv()
addConstant("__nixPath", v, { addConstant("__nixPath", v, {
.type = nList, .type = nList,
.doc = R"( .doc = R"(
List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md). The value of the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path): a list of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
Lookup path expressions can be Lookup path expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and
[desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
using this and
[`builtins.findFile`](./builtins.html#builtins-findFile): [`builtins.findFile`](./builtins.html#builtins-findFile):
```nix ```nix

View file

@ -51,4 +51,6 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
*/ */
void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v); void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v);
void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column);
} }

View file

@ -137,14 +137,14 @@ static RegisterPrimOp primop_addDrvOutputDependencies({
.name = "__addDrvOutputDependencies", .name = "__addDrvOutputDependencies",
.args = {"s"}, .args = {"s"},
.doc = R"( .doc = R"(
Create a copy of the given string where a single consant string context element is turned into a "derivation deep" string context element. Create a copy of the given string where a single constant string context element is turned into a "derivation deep" string context element.
The store path that is the constant string context element should point to a valid derivation, and end in `.drv`. The store path that is the constant string context element should point to a valid derivation, and end in `.drv`.
The original string context element must not be empty or have multiple elements, and it must not have any other type of element other than a constant or derivation deep element. The original string context element must not be empty or have multiple elements, and it must not have any other type of element other than a constant or derivation deep element.
The latter is supported so this function is idempotent. The latter is supported so this function is idempotent.
This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-addDrvOutputDependencies). This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency).
)", )",
.fun = prim_addDrvOutputDependencies .fun = prim_addDrvOutputDependencies
}); });
@ -246,7 +246,7 @@ static RegisterPrimOp primop_getContext({
/* Append the given context to a given string. /* Append the given context to a given string.
See the commentary above unsafeGetContext for details of the See the commentary above getContext for details of the
context representation. context representation.
*/ */
static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v) static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)

View file

@ -64,8 +64,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
if (rev) attrs.insert_or_assign("rev", rev->gitRev()); if (rev) attrs.insert_or_assign("rev", rev->gitRev());
auto input = fetchers::Input::fromAttrs(std::move(attrs)); auto input = fetchers::Input::fromAttrs(std::move(attrs));
// FIXME: use name auto [storePath, input2] = input.fetchToStore(state.store);
auto [storePath, input2] = input.fetch(state.store);
auto attrs2 = state.buildBindings(8); auto attrs2 = state.buildBindings(8);
state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath)); state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath));

View file

@ -182,7 +182,7 @@ static void fetchTree(
state.checkURI(input.toURLString()); state.checkURI(input.toURLString());
auto [storePath, input2] = input.fetch(state.store); auto [storePath, input2] = input.fetchToStore(state.store);
state.allowPath(storePath); state.allowPath(storePath);

View file

@ -8,6 +8,29 @@
namespace nix { namespace nix {
/**
* How errors should be handled when printing values.
*/
enum class ErrorPrintBehavior {
/**
* Print the first line of the error in brackets: `«error: oh no!»`
*/
Print,
/**
* Throw the error to the code that attempted to print the value, instead
* of suppressing it.
*/
Throw,
/**
* Only throw the error if encountered at the top level of the expression.
*
* This will cause expressions like `builtins.throw "uh oh!"` to throw
* errors, but will print attribute sets and other nested structures
* containing values that error (like `nixpkgs`) normally.
*/
ThrowTopLevel,
};
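For example, a caller that wants a top-level `builtins.throw` to propagate while still rendering nested failing values could configure the printer roughly as follows (a sketch; treating `printValue` as the entry point is an assumption here, while `force` and `errors` are fields of `PrintOptions` below):

PrintOptions opts;
opts.force = true;                               // evaluate thunks while printing
opts.errors = ErrorPrintBehavior::ThrowTopLevel; // throw at depth 0, print «error: ...» further down
// printValue(state, std::cout, v, opts);        // `throw "x"` rethrows; `{ a = throw "x"; b = 1; }` still prints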
/** /**
* Options for printing Nix values. * Options for printing Nix values.
*/ */
@ -68,6 +91,11 @@ struct PrintOptions
*/ */
size_t prettyIndent = 0; size_t prettyIndent = 0;
/**
* How to handle errors encountered while printing values.
*/
ErrorPrintBehavior errors = ErrorPrintBehavior::Print;
/** /**
* True if pretty-printing is enabled. * True if pretty-printing is enabled.
*/ */
@ -86,7 +114,7 @@ static PrintOptions errorPrintOptions = PrintOptions {
.maxDepth = 10, .maxDepth = 10,
.maxAttrs = 10, .maxAttrs = 10,
.maxListItems = 10, .maxListItems = 10,
.maxStringLength = 1024 .maxStringLength = 1024,
}; };
} }

View file

@ -271,25 +271,21 @@ private:
void printDerivation(Value & v) void printDerivation(Value & v)
{ {
try { Bindings::iterator i = v.attrs->find(state.sDrvPath);
Bindings::iterator i = v.attrs->find(state.sDrvPath); NixStringContext context;
NixStringContext context; std::string storePath;
std::string storePath; if (i != v.attrs->end())
if (i != v.attrs->end()) storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
if (options.ansiColors) if (options.ansiColors)
output << ANSI_GREEN; output << ANSI_GREEN;
output << "«derivation"; output << "«derivation";
if (!storePath.empty()) { if (!storePath.empty()) {
output << " " << storePath; output << " " << storePath;
}
output << "»";
if (options.ansiColors)
output << ANSI_NORMAL;
} catch (Error & e) {
printError_(e);
} }
output << "»";
if (options.ansiColors)
output << ANSI_NORMAL;
} }
bool shouldPrettyPrintAttrs(AttrVec & v) bool shouldPrettyPrintAttrs(AttrVec & v)
@ -510,64 +506,68 @@ private:
output.flush(); output.flush();
checkInterrupt(); checkInterrupt();
if (options.force) { try {
try { if (options.force) {
state.forceValue(v, v.determinePos(noPos)); state.forceValue(v, v.determinePos(noPos));
} catch (Error & e) {
printError_(e);
return;
} }
}
switch (v.type()) { switch (v.type()) {
case nInt: case nInt:
printInt(v); printInt(v);
break; break;
case nFloat: case nFloat:
printFloat(v); printFloat(v);
break; break;
case nBool: case nBool:
printBool(v); printBool(v);
break; break;
case nString: case nString:
printString(v); printString(v);
break; break;
case nPath: case nPath:
printPath(v); printPath(v);
break; break;
case nNull: case nNull:
printNull(); printNull();
break; break;
case nAttrs: case nAttrs:
printAttrs(v, depth); printAttrs(v, depth);
break; break;
case nList: case nList:
printList(v, depth); printList(v, depth);
break; break;
case nFunction: case nFunction:
printFunction(v); printFunction(v);
break; break;
case nThunk: case nThunk:
printThunk(v); printThunk(v);
break; break;
case nExternal: case nExternal:
printExternal(v); printExternal(v);
break; break;
default: default:
printUnknown(); printUnknown();
break; break;
}
} catch (Error & e) {
if (options.errors == ErrorPrintBehavior::Throw
|| (options.errors == ErrorPrintBehavior::ThrowTopLevel
&& depth == 0)) {
throw;
}
printError_(e);
} }
} }

View file

@ -161,7 +161,7 @@ bool Input::contains(const Input & other) const
return false; return false;
} }
std::pair<StorePath, Input> Input::fetch(ref<Store> store) const std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const
{ {
if (!scheme) if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs())); throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
@ -186,56 +186,85 @@ std::pair<StorePath, Input> Input::fetch(ref<Store> store) const
auto [storePath, input] = [&]() -> std::pair<StorePath, Input> { auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
try { try {
return scheme->fetch(store, *this); auto [accessor, final] = getAccessorUnchecked(store);
auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, final.getName());
auto narHash = store->queryPathInfo(storePath)->narHash;
final.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
scheme->checkLocks(*this, final);
return {storePath, final};
} catch (Error & e) { } catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string()); e.addTrace({}, "while fetching the input '%s'", to_string());
throw; throw;
} }
}(); }();
auto narHash = store->queryPathInfo(storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(),
store->printStorePath(storePath),
prevNarHash->to_string(HashFormat::SRI, true),
narHash.to_string(HashFormat::SRI, true));
}
if (auto prevLastModified = getLastModified()) {
if (input.getLastModified() != prevLastModified)
throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
input.to_string(), *prevLastModified);
}
if (auto prevRev = getRev()) {
if (input.getRev() != prevRev)
throw Error("'rev' attribute mismatch in input '%s', expected %s",
input.to_string(), prevRev->gitRev());
}
if (auto prevRevCount = getRevCount()) {
if (input.getRevCount() != prevRevCount)
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
input.to_string(), *prevRevCount);
}
return {std::move(storePath), input}; return {std::move(storePath), input};
} }
void InputScheme::checkLocks(const Input & specified, const Input & final) const
{
if (auto prevNarHash = specified.getNarHash()) {
if (final.getNarHash() != prevNarHash) {
if (final.getNarHash())
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'",
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), final.getNarHash()->to_string(HashFormat::SRI, true));
else
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none",
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true));
}
}
if (auto prevLastModified = specified.getLastModified()) {
if (final.getLastModified() != prevLastModified)
throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
final.to_string(), *prevLastModified);
}
if (auto prevRev = specified.getRev()) {
if (final.getRev() != prevRev)
throw Error("'rev' attribute mismatch in input '%s', expected %s",
final.to_string(), prevRev->gitRev());
}
if (auto prevRevCount = specified.getRevCount()) {
if (final.getRevCount() != prevRevCount)
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
final.to_string(), *prevRevCount);
}
}
std::pair<ref<InputAccessor>, Input> Input::getAccessor(ref<Store> store) const std::pair<ref<InputAccessor>, Input> Input::getAccessor(ref<Store> store) const
{ {
try { try {
return scheme->getAccessor(store, *this); auto [accessor, final] = getAccessorUnchecked(store);
scheme->checkLocks(*this, final);
return {accessor, std::move(final)};
} catch (Error & e) { } catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string()); e.addTrace({}, "while fetching the input '%s'", to_string());
throw; throw;
} }
} }
std::pair<ref<InputAccessor>, Input> Input::getAccessorUnchecked(ref<Store> store) const
{
// FIXME: cache the accessor
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
auto [accessor, final] = scheme->getAccessor(store, *this);
accessor->fingerprint = scheme->getFingerprint(store, final);
return {accessor, std::move(final)};
}
Input Input::applyOverrides( Input Input::applyOverrides(
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev) const std::optional<Hash> rev) const
@ -372,18 +401,6 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
throw Error("do not know how to clone input '%s'", input.to_string()); throw Error("do not know how to clone input '%s'", input.to_string());
} }
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
{
auto [accessor, input2] = getAccessor(store, input);
auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, input2.getName());
return {storePath, input2};
}
std::pair<ref<InputAccessor>, Input> InputScheme::getAccessor(ref<Store> store, const Input & input) const
{
throw UnimplementedError("InputScheme must implement fetch() or getAccessor()");
}
std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
{ {
return {}; return {};

View file

@ -80,10 +80,21 @@ public:
* Fetch the entire input into the Nix store, returning the * Fetch the entire input into the Nix store, returning the
* location in the Nix store and the locked input. * location in the Nix store and the locked input.
*/ */
std::pair<StorePath, Input> fetch(ref<Store> store) const; std::pair<StorePath, Input> fetchToStore(ref<Store> store) const;
/**
* Return an InputAccessor that allows access to files in the
* input without copying it to the store. Also return a possibly
* unlocked input.
*/
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store) const; std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store) const;
private:
std::pair<ref<InputAccessor>, Input> getAccessorUnchecked(ref<Store> store) const;
public:
Input applyOverrides( Input applyOverrides(
std::optional<std::string> ref, std::optional<std::string> ref,
std::optional<Hash> rev) const; std::optional<Hash> rev) const;
@ -173,9 +184,7 @@ struct InputScheme
std::string_view contents, std::string_view contents,
std::optional<std::string> commitMsg) const; std::optional<std::string> commitMsg) const;
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input); virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const = 0;
virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const;
/** /**
* Is this `InputScheme` part of an experimental feature? * Is this `InputScheme` part of an experimental feature?
@ -202,6 +211,14 @@ struct InputScheme
*/ */
virtual bool isLocked(const Input & input) const virtual bool isLocked(const Input & input) const
{ return false; } { return false; }
/**
* Check the locking attributes in `final` against
* `specified`. E.g. if `specified` has a `rev` attribute, then
* `final` must have the same `rev` attribute. Throw an exception
* if there is a mismatch.
*/
virtual void checkLocks(const Input & specified, const Input & final) const;
}; };
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher); void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);

View file

@ -761,8 +761,6 @@ struct GitInputScheme : InputScheme
? getAccessorFromCommit(store, repoInfo, std::move(input)) ? getAccessorFromCommit(store, repoInfo, std::move(input))
: getAccessorFromWorkdir(store, repoInfo, std::move(input)); : getAccessorFromWorkdir(store, repoInfo, std::move(input));
accessor->fingerprint = final.getFingerprint(store);
return {accessor, std::move(final)}; return {accessor, std::move(final)};
} }

View file

@ -98,6 +98,10 @@ struct GitArchiveInputScheme : InputScheme
if (ref) input.attrs.insert_or_assign("ref", *ref); if (ref) input.attrs.insert_or_assign("ref", *ref);
if (host_url) input.attrs.insert_or_assign("host", *host_url); if (host_url) input.attrs.insert_or_assign("host", *host_url);
auto narHash = url.query.find("narHash");
if (narHash != url.query.end())
input.attrs.insert_or_assign("narHash", narHash->second);
return input; return input;
} }
@ -111,6 +115,7 @@ struct GitArchiveInputScheme : InputScheme
"narHash", "narHash",
"lastModified", "lastModified",
"host", "host",
"treeHash",
}; };
} }
@ -134,10 +139,13 @@ struct GitArchiveInputScheme : InputScheme
assert(!(ref && rev)); assert(!(ref && rev));
if (ref) path += "/" + *ref; if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false); if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
return ParsedURL { auto url = ParsedURL {
.scheme = std::string { schemeName() }, .scheme = std::string { schemeName() },
.path = path, .path = path,
}; };
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
return url;
} }
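With this change a locked github/gitlab-style input can carry its NAR hash in URL form, e.g. (placeholder values) `github:owner/repo/<rev>?narHash=sha256-<hash>`, and `fromURL` above reads the same `narHash` query parameter back into the input attributes, so the hash round-trips between the two.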
Input applyOverrides( Input applyOverrides(
@ -268,15 +276,15 @@ struct GitArchiveInputScheme : InputScheme
{ {
auto [input, tarballInfo] = downloadArchive(store, _input); auto [input, tarballInfo] = downloadArchive(store, _input);
#if 0
input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev()); input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev());
#endif
input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified)); input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified));
auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false); auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false);
accessor->setPathDisplay("«" + input.to_string() + "»"); accessor->setPathDisplay("«" + input.to_string() + "»");
accessor->fingerprint = input.getFingerprint(store);
return {accessor, input}; return {accessor, input};
} }

View file

@ -97,7 +97,7 @@ struct IndirectInputScheme : InputScheme
return input; return input;
} }
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const override
{ {
throw Error("indirect input '%s' cannot be fetched directly", input.to_string()); throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
} }

View file

@ -6,8 +6,8 @@
#include "tarfile.hh" #include "tarfile.hh"
#include "store-api.hh" #include "store-api.hh"
#include "url-parts.hh" #include "url-parts.hh"
#include "fs-input-accessor.hh"
#include "posix-source-accessor.hh" #include "posix-source-accessor.hh"
#include "fetch-settings.hh" #include "fetch-settings.hh"
#include <sys/time.h> #include <sys/time.h>
@ -161,9 +161,9 @@ struct MercurialInputScheme : InputScheme
return {isLocal, isLocal ? url.path : url.base}; return {isLocal, isLocal ? url.path : url.base};
} }
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override StorePath fetchToStore(ref<Store> store, Input & input) const
{ {
Input input(_input); auto origRev = input.getRev();
auto name = input.getName(); auto name = input.getName();
@ -218,7 +218,7 @@ struct MercurialInputScheme : InputScheme
FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
filter); filter);
return {std::move(storePath), input}; return storePath;
} }
} }
@ -242,13 +242,12 @@ struct MercurialInputScheme : InputScheme
}); });
}; };
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath) auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
-> std::pair<StorePath, Input>
{ {
assert(input.getRev()); assert(input.getRev());
assert(!_input.getRev() || _input.getRev() == input.getRev()); assert(!origRev || origRev == input.getRev());
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount")); input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
return {std::move(storePath), input}; return storePath;
}; };
if (input.getRev()) { if (input.getRev()) {
@ -329,7 +328,7 @@ struct MercurialInputScheme : InputScheme
{"revCount", (uint64_t) revCount}, {"revCount", (uint64_t) revCount},
}); });
if (!_input.getRev()) if (!origRev)
getCache()->add( getCache()->add(
*store, *store,
unlockedAttrs, unlockedAttrs,
@ -347,6 +346,15 @@ struct MercurialInputScheme : InputScheme
return makeResult(infoAttrs, std::move(storePath)); return makeResult(infoAttrs, std::move(storePath));
} }
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
{
Input input(_input);
auto storePath = fetchToStore(store, input);
return {makeStorePathAccessor(store, storePath), input};
}
bool isLocked(const Input & input) const override bool isLocked(const Input & input) const override
{ {
return (bool) input.getRev(); return (bool) input.getRev();

View file

@ -1,6 +1,8 @@
#include "fetchers.hh" #include "fetchers.hh"
#include "store-api.hh" #include "store-api.hh"
#include "archive.hh" #include "archive.hh"
#include "fs-input-accessor.hh"
#include "posix-source-accessor.hh"
namespace nix::fetchers { namespace nix::fetchers {
@ -87,6 +89,15 @@ struct PathInputScheme : InputScheme
writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents); writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
} }
std::optional<std::string> isRelative(const Input & input) const
{
auto path = getStrAttr(input.attrs, "path");
if (hasPrefix(path, "/"))
return std::nullopt;
else
return path;
}
bool isLocked(const Input & input) const override bool isLocked(const Input & input) const override
{ {
return (bool) input.getNarHash(); return (bool) input.getNarHash();
@ -102,7 +113,7 @@ struct PathInputScheme : InputScheme
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string()); throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
} }
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
{ {
Input input(_input); Input input(_input);
std::string absPath; std::string absPath;
@ -144,7 +155,24 @@ struct PathInputScheme : InputScheme
} }
input.attrs.insert_or_assign("lastModified", uint64_t(mtime)); input.attrs.insert_or_assign("lastModified", uint64_t(mtime));
return {std::move(*storePath), input}; return {makeStorePathAccessor(store, *storePath), std::move(input)};
}
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
{
if (isRelative(input))
return std::nullopt;
/* If this path is in the Nix store, use the hash of the
store object and the subpath. */
auto path = getAbsPath(input);
try {
auto [storePath, subPath] = store->toStorePath(path.abs());
auto info = store->queryPathInfo(storePath);
return fmt("path:%s:%s", info->narHash.to_string(HashFormat::Base16, false), subPath);
} catch (Error &) {
return std::nullopt;
}
} }
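So for a path input that already resolves into the store, the fingerprint has the schematic form `path:<base16 NAR hash of the containing store object>:<subpath within it>`, while relative paths and paths outside the store yield no fingerprint at all.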
std::optional<ExperimentalFeature> experimentalFeature() const override std::optional<ExperimentalFeature> experimentalFeature() const override

View file

@ -123,6 +123,11 @@ struct KeyedBuildResult : BuildResult
* The derivation we built or the store path we substituted. * The derivation we built or the store path we substituted.
*/ */
DerivedPath path; DerivedPath path;
// Hack to work around a gcc "may be used uninitialized" warning.
KeyedBuildResult(BuildResult res, DerivedPath path)
: BuildResult(std::move(res)), path(std::move(path))
{ }
}; };
} }

View file

@ -2480,6 +2480,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
CanonPath { tmpDir + "/tmp" }).hash; CanonPath { tmpDir + "/tmp" }).hash;
} }
} }
assert(false);
}(); }();
ValidPathInfo newInfo0 { ValidPathInfo newInfo0 {
@ -2543,6 +2544,12 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
[&](const DerivationOutput::CAFixed & dof) { [&](const DerivationOutput::CAFixed & dof) {
auto & wanted = dof.ca.hash; auto & wanted = dof.ca.hash;
// Replace the output by a fresh copy of itself to make sure
// that there's no stale file descriptor pointing to it
Path tmpOutput = actualPath + ".tmp";
copyFile(actualPath, tmpOutput, true);
renameFile(tmpOutput, actualPath);
auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating { auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
.method = dof.ca.method, .method = dof.ca.method,
.hashAlgo = wanted.algo, .hashAlgo = wanted.algo,
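
The comment above explains the motivation for this hunk: a builder may still hold an open file descriptor on the content-addressed output, so the output is replaced by a fresh copy of itself before it is hashed and registered. As a rough illustration of the same copy-to-`.tmp`-then-rename idiom, here is a minimal sketch using plain `std::filesystem` rather than Nix's own `copyFile`/`renameFile` helpers (the `refreshPath` name is illustrative, not part of Nix):

```cpp
#include <filesystem>
#include <string>

namespace fs = std::filesystem;

// Replace `path` with a freshly copied version of itself, so any stale file
// descriptor still pointing at the old inode no longer refers to the final
// output. Mirrors the "copy to .tmp, then rename over" step added above.
void refreshPath(const std::string & path)
{
    fs::path tmp = path + ".tmp";
    fs::remove_all(tmp);                            // defensive: clear leftovers
    fs::copy(path, tmp,
        fs::copy_options::recursive
        | fs::copy_options::copy_symlinks);         // keep symlinks as links
    fs::remove_all(path);                           // drop the original inodes
    fs::rename(tmp, path);                          // put the fresh copy in place
}
```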

View file

@ -45,7 +45,7 @@ R""(
; allow it if the package explicitly asks for it. ; allow it if the package explicitly asks for it.
(if (param "_ALLOW_LOCAL_NETWORKING") (if (param "_ALLOW_LOCAL_NETWORKING")
(begin (begin
(allow network* (local ip) (local tcp) (local udp)) (allow network* (remote ip "localhost:*"))
; Allow access to /etc/resolv.conf (which is a symlink to ; Allow access to /etc/resolv.conf (which is a symlink to
; /private/var/run/resolv.conf). ; /private/var/run/resolv.conf).

View file

@ -415,6 +415,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
dumpMethod = FileSerialisationMethod::Recursive; dumpMethod = FileSerialisationMethod::Recursive;
break; break;
default:
assert(false);
} }
// TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store. // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair); auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair);

View file

@ -527,6 +527,8 @@ StorePath RemoteStore::addToStoreFromDump(
// Use NAR; Git is not a serialization method // Use NAR; Git is not a serialization method
fsm = FileSerialisationMethod::Recursive; fsm = FileSerialisationMethod::Recursive;
break; break;
default:
assert(false);
} }
if (fsm != dumpMethod) if (fsm != dumpMethod)
unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination"); unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination");

View file

@ -38,6 +38,11 @@ unsigned int getMaxCPU()
auto cpuMax = readFile(cpuFile); auto cpuMax = readFile(cpuFile);
auto cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " \n"); auto cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " \n");
if (cpuMaxParts.size() != 2) {
return 0;
}
auto quota = cpuMaxParts[0]; auto quota = cpuMaxParts[0];
auto period = cpuMaxParts[1]; auto period = cpuMaxParts[1];
if (quota != "max") if (quota != "max")
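
This hunk makes `getMaxCPU` bail out when the cgroup `cpu.max` file does not contain exactly two fields. For context, a cgroup v2 `cpu.max` file holds `<quota> <period>` (or `max <period>` when unlimited), and the effective CPU count is the quota divided by the period. The following standalone sketch (not Nix's actual implementation) shows that interpretation, returning `0` whenever no limit can be determined:

```cpp
#include <cstdio>
#include <sstream>
#include <string>
#include <vector>

// Parse the contents of a cgroup v2 cpu.max file ("<quota> <period>" or
// "max <period>") and derive an effective CPU count, returning 0 when no
// limit can be determined -- the same convention the guard above adds.
static unsigned int maxCpuFromCgroup(const std::string & cpuMax)
{
    std::istringstream in(cpuMax);
    std::vector<std::string> parts;
    for (std::string tok; in >> tok; )
        parts.push_back(tok);
    if (parts.size() != 2)
        return 0;                    // malformed file: treat as "no limit known"
    if (parts[0] == "max")
        return 0;                    // no quota configured
    unsigned long quota = std::stoul(parts[0]);
    unsigned long period = std::stoul(parts[1]);
    if (period == 0)
        return 0;
    return static_cast<unsigned int>(quota / period);
}

int main()
{
    std::printf("%u\n", maxCpuFromCgroup("200000 100000")); // 2 CPUs
    std::printf("%u\n", maxCpuFromCgroup("max 100000"));    // 0: unlimited
    std::printf("%u\n", maxCpuFromCgroup("garbage"));       // 0: malformed
}
```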

View file

@ -1,4 +1,5 @@
#include "experimental-features.hh" #include "experimental-features.hh"
#include "fmt.hh"
#include "util.hh" #include "util.hh"
#include "nlohmann/json.hpp" #include "nlohmann/json.hpp"
@ -10,6 +11,7 @@ struct ExperimentalFeatureDetails
ExperimentalFeature tag; ExperimentalFeature tag;
std::string_view name; std::string_view name;
std::string_view description; std::string_view description;
std::string_view trackingUrl;
}; };
/** /**
@ -35,6 +37,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
[__contentAddressed](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed) [__contentAddressed](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed)
for details. for details.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/35",
}, },
{ {
.tag = Xp::ImpureDerivations, .tag = Xp::ImpureDerivations,
@ -65,6 +68,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime). This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime).
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/42",
}, },
{ {
.tag = Xp::Flakes, .tag = Xp::Flakes,
@ -73,6 +77,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Enable flakes. See the manual entry for [`nix Enable flakes. See the manual entry for [`nix
flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details. flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/27",
}, },
{ {
.tag = Xp::FetchTree, .tag = Xp::FetchTree,
@ -86,6 +91,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation. Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/31",
}, },
{ {
.tag = Xp::NixCommand, .tag = Xp::NixCommand,
@ -94,6 +100,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Enable the new `nix` subcommands. See the manual on Enable the new `nix` subcommands. See the manual on
[`nix`](@docroot@/command-ref/new-cli/nix.md) for details. [`nix`](@docroot@/command-ref/new-cli/nix.md) for details.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/28",
}, },
{ {
.tag = Xp::GitHashing, .tag = Xp::GitHashing,
@ -102,6 +109,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allow creating (content-addressed) store objects which are hashed via Git's hashing algorithm. Allow creating (content-addressed) store objects which are hashed via Git's hashing algorithm.
These store objects will not be understandable by older versions of Nix. These store objects will not be understandable by older versions of Nix.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/41",
}, },
{ {
.tag = Xp::RecursiveNix, .tag = Xp::RecursiveNix,
@ -143,6 +151,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
already in the build inputs or built by a previous recursive Nix already in the build inputs or built by a previous recursive Nix
call. call.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/47",
}, },
{ {
.tag = Xp::NoUrlLiterals, .tag = Xp::NoUrlLiterals,
@ -184,6 +193,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
containing parameters have to be quoted anyway, and unquoted URLs containing parameters have to be quoted anyway, and unquoted URLs
may confuse external tooling. may confuse external tooling.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/44",
}, },
{ {
.tag = Xp::FetchClosure, .tag = Xp::FetchClosure,
@ -191,6 +201,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"( .description = R"(
Enable the use of the [`fetchClosure`](@docroot@/language/builtins.md#builtins-fetchClosure) built-in function in the Nix language. Enable the use of the [`fetchClosure`](@docroot@/language/builtins.md#builtins-fetchClosure) built-in function in the Nix language.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/40",
}, },
{ {
.tag = Xp::ReplFlake, .tag = Xp::ReplFlake,
@ -200,6 +211,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allow passing [installables](@docroot@/command-ref/new-cli/nix.md#installables) to `nix repl`, making its interface consistent with the other experimental commands. Allow passing [installables](@docroot@/command-ref/new-cli/nix.md#installables) to `nix repl`, making its interface consistent with the other experimental commands.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/32",
}, },
{ {
.tag = Xp::AutoAllocateUids, .tag = Xp::AutoAllocateUids,
@ -208,6 +220,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allows Nix to automatically pick UIDs for builds, rather than creating Allows Nix to automatically pick UIDs for builds, rather than creating
`nixbld*` user accounts. See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details. `nixbld*` user accounts. See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/34",
}, },
{ {
.tag = Xp::Cgroups, .tag = Xp::Cgroups,
@ -216,6 +229,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allows Nix to execute builds inside cgroups. See Allows Nix to execute builds inside cgroups. See
the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details. the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/36",
}, },
{ {
.tag = Xp::DaemonTrustOverride, .tag = Xp::DaemonTrustOverride,
@ -226,6 +240,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
useful for various experiments with `nix-daemon --stdio` useful for various experiments with `nix-daemon --stdio`
networking. networking.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/38",
}, },
{ {
.tag = Xp::DynamicDerivations, .tag = Xp::DynamicDerivations,
@ -239,6 +254,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
- dependencies in derivations on the outputs of - dependencies in derivations on the outputs of
derivations that are themselves derivations outputs. derivations that are themselves derivations outputs.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/39",
}, },
{ {
.tag = Xp::ParseTomlTimestamps, .tag = Xp::ParseTomlTimestamps,
@ -246,6 +262,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"( .description = R"(
Allow parsing of timestamps in builtins.fromTOML. Allow parsing of timestamps in builtins.fromTOML.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/45",
}, },
{ {
.tag = Xp::ReadOnlyLocalStore, .tag = Xp::ReadOnlyLocalStore,
@ -253,6 +270,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"( .description = R"(
Allow the use of the `read-only` parameter in [local store](@docroot@/store/types/local-store.md) URIs. Allow the use of the `read-only` parameter in [local store](@docroot@/store/types/local-store.md) URIs.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/46",
}, },
{ {
.tag = Xp::LocalOverlayStore, .tag = Xp::LocalOverlayStore,
@ -260,7 +278,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"( .description = R"(
Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store). Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store).
)", )",
.trackingUrl = ""https://github.com/NixOS/nix/milestone/50", .trackingUrl = "https://github.com/NixOS/nix/milestone/50",
}, },
{ {
.tag = Xp::ConfigurableImpureEnv, .tag = Xp::ConfigurableImpureEnv,
@ -268,6 +286,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"( .description = R"(
Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting. Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/37",
}, },
{ {
.tag = Xp::MountedSSHStore, .tag = Xp::MountedSSHStore,
@ -275,6 +294,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"( .description = R"(
Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesytem-mounted). Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesytem-mounted).
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/43",
}, },
{ {
.tag = Xp::VerifiedFetches, .tag = Xp::VerifiedFetches,
@ -282,6 +302,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"( .description = R"(
Enables verification of git commit signatures through the [`fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) built-in. Enables verification of git commit signatures through the [`fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) built-in.
)", )",
.trackingUrl = "https://github.com/NixOS/nix/milestone/48",
}, },
}}; }};
@ -320,9 +341,12 @@ std::string_view showExperimentalFeature(const ExperimentalFeature tag)
nlohmann::json documentExperimentalFeatures() nlohmann::json documentExperimentalFeatures()
{ {
StringMap res; StringMap res;
for (auto & xpFeature : xpFeatureDetails) for (auto & xpFeature : xpFeatureDetails) {
res[std::string { xpFeature.name }] = std::stringstream docOss;
trim(stripIndentation(xpFeature.description)); docOss << stripIndentation(xpFeature.description);
docOss << fmt("\nRefer to [%1% tracking issue](%2%) for feature tracking.", xpFeature.name, xpFeature.trackingUrl);
res[std::string{xpFeature.name}] = trim(docOss.str());
}
return (nlohmann::json) res; return (nlohmann::json) res;
} }
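
With the new `trackingUrl` field, each feature's documentation becomes the de-indented description followed by a pointer to its tracking issue, as assembled in the loop above. A minimal sketch of that string assembly (plain standard C++; `documentFeature` and the sample description are illustrative, not Nix's API):

```cpp
#include <iostream>
#include <map>
#include <string>

// Build the per-feature documentation string the way the loop above does:
// the (already de-indented) description, followed by a link to the
// feature's tracking issue.
std::string documentFeature(const std::string & name,
                            const std::string & description,
                            const std::string & trackingUrl)
{
    return description
        + "\nRefer to [" + name + " tracking issue](" + trackingUrl
        + ") for feature tracking.";
}

int main()
{
    std::map<std::string, std::string> docs;
    docs["flakes"] = documentFeature(
        "flakes",
        "Enable flakes. See the manual entry for `nix flake` for details.",
        "https://github.com/NixOS/nix/milestone/27");
    std::cout << docs["flakes"] << "\n";
}
```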

View file

@ -123,7 +123,7 @@ Hash hashPath(
case FileIngestionMethod::Git: case FileIngestionMethod::Git:
return git::dumpHash(ht, accessor, path, filter).hash; return git::dumpHash(ht, accessor, path, filter).hash;
} }
assert(false);
} }
} }

View file

@ -617,6 +617,11 @@ void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete)
} }
} }
void copyFile(const Path & oldPath, const Path & newPath, bool andDelete)
{
return copy(fs::directory_entry(fs::path(oldPath)), fs::path(newPath), andDelete);
}
void renameFile(const Path & oldName, const Path & newName) void renameFile(const Path & oldName, const Path & newName)
{ {
fs::rename(oldName, newName); fs::rename(oldName, newName);

View file

@ -186,6 +186,13 @@ void renameFile(const Path & src, const Path & dst);
*/ */
void moveFile(const Path & src, const Path & dst); void moveFile(const Path & src, const Path & dst);
/**
* Recursively copy the content of `oldPath` to `newPath`. If `andDelete` is
* `true`, then also remove `oldPath` (making this equivalent to `moveFile`, but
 * with the guarantee that the destination will be fresh, with no stale inode
* or file descriptor pointing to it).
*/
void copyFile(const Path & oldPath, const Path & newPath, bool andDelete);
/** /**
* Automatic cleanup of resources. * Automatic cleanup of resources.

View file

@ -8,7 +8,6 @@
namespace nix { namespace nix {
namespace {
/** /**
* A helper for writing `boost::format` expressions. * A helper for writing `boost::format` expressions.
* *
@ -35,14 +34,13 @@ inline void formatHelper(F & f, const T & x, const Args & ... args)
/** /**
* Set the correct exceptions for `fmt`. * Set the correct exceptions for `fmt`.
*/ */
void setExceptions(boost::format & fmt) inline void setExceptions(boost::format & fmt)
{ {
fmt.exceptions( fmt.exceptions(
boost::io::all_error_bits ^ boost::io::all_error_bits ^
boost::io::too_many_args_bit ^ boost::io::too_many_args_bit ^
boost::io::too_few_args_bit); boost::io::too_few_args_bit);
} }
}
/** /**
* A helper for writing a `boost::format` expression to a string. * A helper for writing a `boost::format` expression to a string.

View file

@ -56,31 +56,63 @@ void parseBlob(
FileSystemObjectSink & sink, FileSystemObjectSink & sink,
const Path & sinkPath, const Path & sinkPath,
Source & source, Source & source,
bool executable, BlobMode blobMode,
const ExperimentalFeatureSettings & xpSettings) const ExperimentalFeatureSettings & xpSettings)
{ {
xpSettings.require(Xp::GitHashing); xpSettings.require(Xp::GitHashing);
sink.createRegularFile(sinkPath, [&](auto & crf) { unsigned long long size = std::stoi(getStringUntil(source, 0));
if (executable)
crf.isExecutable();
unsigned long long size = std::stoi(getStringUntil(source, 0)); auto doRegularFile = [&](bool executable) {
sink.createRegularFile(sinkPath, [&](auto & crf) {
if (executable)
crf.isExecutable();
crf.preallocateContents(size); crf.preallocateContents(size);
unsigned long long left = size; unsigned long long left = size;
std::string buf; std::string buf;
buf.reserve(65536); buf.reserve(65536);
while (left) { while (left) {
checkInterrupt();
buf.resize(std::min((unsigned long long)buf.capacity(), left));
source(buf);
crf(buf);
left -= buf.size();
}
});
};
switch (blobMode) {
case BlobMode::Regular:
doRegularFile(false);
break;
case BlobMode::Executable:
doRegularFile(true);
break;
case BlobMode::Symlink:
{
std::string target;
target.resize(size, '0');
target.reserve(size);
for (size_t n = 0; n < target.size();) {
checkInterrupt(); checkInterrupt();
buf.resize(std::min((unsigned long long)buf.capacity(), left)); n += source.read(
source(buf); const_cast<char *>(target.c_str()) + n,
crf(buf); target.size() - n);
left -= buf.size();
} }
});
sink.createSymlink(sinkPath, target);
break;
}
default:
assert(false);
}
} }
void parseTree( void parseTree(
@ -142,7 +174,7 @@ void parse(
FileSystemObjectSink & sink, FileSystemObjectSink & sink,
const Path & sinkPath, const Path & sinkPath,
Source & source, Source & source,
bool executable, BlobMode rootModeIfBlob,
std::function<SinkHook> hook, std::function<SinkHook> hook,
const ExperimentalFeatureSettings & xpSettings) const ExperimentalFeatureSettings & xpSettings)
{ {
@ -152,7 +184,7 @@ void parse(
switch (type) { switch (type) {
case ObjectType::Blob: case ObjectType::Blob:
parseBlob(sink, sinkPath, source, executable, xpSettings); parseBlob(sink, sinkPath, source, rootModeIfBlob, xpSettings);
break; break;
case ObjectType::Tree: case ObjectType::Tree:
parseTree(sink, sinkPath, source, hook, xpSettings); parseTree(sink, sinkPath, source, hook, xpSettings);
@ -177,7 +209,7 @@ std::optional<Mode> convertMode(SourceAccessor::Type type)
void restore(FileSystemObjectSink & sink, Source & source, std::function<RestoreHook> hook) void restore(FileSystemObjectSink & sink, Source & source, std::function<RestoreHook> hook)
{ {
parse(sink, "", source, false, [&](Path name, TreeEntry entry) { parse(sink, "", source, BlobMode::Regular, [&](Path name, TreeEntry entry) {
auto [accessor, from] = hook(entry.hash); auto [accessor, from] = hook(entry.hash);
auto stat = accessor->lstat(from); auto stat = accessor->lstat(from);
auto gotOpt = convertMode(stat.type); auto gotOpt = convertMode(stat.type);
@ -275,6 +307,13 @@ Mode dump(
} }
case SourceAccessor::tSymlink: case SourceAccessor::tSymlink:
{
auto target = accessor.readLink(path);
dumpBlobPrefix(target.size(), sink, xpSettings);
sink(target);
return Mode::Symlink;
}
case SourceAccessor::tMisc: case SourceAccessor::tMisc:
default: default:
throw Error("file '%1%' has an unsupported type", path); throw Error("file '%1%' has an unsupported type", path);
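
The new `tSymlink` case above serialises a symlink as a git blob whose payload is just the link target; that the blob represents a symlink is recorded in the parent tree entry's mode, not in the blob itself. A small sketch of the encoding (`dumpSymlinkBlob` is illustrative; `blob <size>\0` is git's standard object framing):

```cpp
#include <iostream>
#include <string>

// Serialise a symlink the way git stores it: the blob payload is simply the
// link target, preceded by the usual "blob <size>\0" object header; the fact
// that it is a symlink lives in the parent tree entry's mode (0120000).
std::string dumpSymlinkBlob(const std::string & target)
{
    std::string out = "blob " + std::to_string(target.size());
    out.push_back('\0');
    out += target;
    return out;
}

int main()
{
    auto blob = dumpSymlinkBlob("./hello/world.txt");
    std::cout << blob.size() << " bytes\n";   // "blob 17" + NUL + 17-byte target = 25
}
```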

View file

@ -75,10 +75,23 @@ ObjectType parseObjectType(
Source & source, Source & source,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* These 3 modes are represented by blob objects.
*
* Sometimes we need this information to disambiguate how a blob is
* being used to better match our own "file system object" data model.
*/
enum struct BlobMode : RawMode
{
Regular = static_cast<RawMode>(Mode::Regular),
Executable = static_cast<RawMode>(Mode::Executable),
Symlink = static_cast<RawMode>(Mode::Symlink),
};
void parseBlob( void parseBlob(
FileSystemObjectSink & sink, const Path & sinkPath, FileSystemObjectSink & sink, const Path & sinkPath,
Source & source, Source & source,
bool executable, BlobMode blobMode,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
void parseTree( void parseTree(
@ -89,11 +102,15 @@ void parseTree(
/** /**
* Helper putting the previous three `parse*` functions together. * Helper putting the previous three `parse*` functions together.
*
* @rootModeIfBlob How to interpret a root blob, for which there is no
 * disambiguating dir entry to answer that question. If the root is not
* a blob, this is ignored.
*/ */
void parse( void parse(
FileSystemObjectSink & sink, const Path & sinkPath, FileSystemObjectSink & sink, const Path & sinkPath,
Source & source, Source & source,
bool executable, BlobMode rootModeIfBlob,
std::function<SinkHook> hook, std::function<SinkHook> hook,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
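
`BlobMode` reuses git's raw tree-entry modes so that a root blob can be interpreted the same way a blob referenced from a tree would be. A toy sketch of that mapping, assuming the standard git octal mode values (the `describe` helper is purely illustrative):

```cpp
#include <cstdint>
#include <iostream>

// Standard git tree-entry modes for the three kinds of blob.
enum class BlobMode : std::uint32_t {
    Regular    = 0100644,
    Executable = 0100755,
    Symlink    = 0120000,
};

// Decide how a blob should be materialised as a file-system object,
// based on the mode recorded in the parent tree entry (or, for a root
// blob, supplied by the caller).
const char * describe(BlobMode m)
{
    switch (m) {
    case BlobMode::Regular:    return "regular file";
    case BlobMode::Executable: return "regular file (executable bit set)";
    case BlobMode::Symlink:    return "symlink (blob contents are the target)";
    }
    return "unknown";
}

int main()
{
    std::cout << describe(BlobMode::Symlink) << "\n";
}
```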

View file

@ -29,32 +29,17 @@ std::optional<LinesOfCode> Pos::getCodeLines() const
return std::nullopt; return std::nullopt;
if (auto source = getSource()) { if (auto source = getSource()) {
LinesIterator lines(*source), end;
std::istringstream iss(*source);
// count the newlines.
int count = 0;
std::string curLine;
int pl = line - 1;
LinesOfCode loc; LinesOfCode loc;
do { if (line > 1)
std::getline(iss, curLine); std::advance(lines, line - 2);
++count; if (lines != end && line > 1)
if (count < pl) loc.prevLineOfCode = *lines++;
; if (lines != end)
else if (count == pl) { loc.errLineOfCode = *lines++;
loc.prevLineOfCode = curLine; if (lines != end)
} else if (count == pl + 1) { loc.nextLineOfCode = *lines++;
loc.errLineOfCode = curLine;
} else if (count == pl + 2) {
loc.nextLineOfCode = curLine;
break;
}
if (!iss.good())
break;
} while (true);
return loc; return loc;
} }
@ -109,4 +94,26 @@ std::ostream & operator<<(std::ostream & str, const Pos & pos)
return str; return str;
} }
void Pos::LinesIterator::bump(bool atFirst)
{
if (!atFirst) {
pastEnd = input.empty();
if (!input.empty() && input[0] == '\r')
input.remove_prefix(1);
if (!input.empty() && input[0] == '\n')
input.remove_prefix(1);
}
// nix line endings are not only \n as eg std::getline assumes, but also
// \r\n **and \r alone**. not treating them all the same causes error
// reports to not match with line numbers as the parser expects them.
auto eol = input.find_first_of("\r\n");
if (eol > input.size())
eol = input.size();
curLine = input.substr(0, eol);
input.remove_prefix(eol);
}
} }
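
`LinesIterator::bump` treats `\n`, `\r`, and `\r\n` uniformly as line endings so that reported positions agree with the parser's own line counting. A self-contained sketch of the same splitting behaviour (a free function rather than the iterator, for illustration only):

```cpp
#include <iostream>
#include <string_view>
#include <vector>

// Split `input` into lines, treating "\n", "\r" and "\r\n" all as a single
// line ending -- the behaviour the iterator above implements so that error
// positions line up with the parser's line numbers.
std::vector<std::string_view> splitLines(std::string_view input)
{
    std::vector<std::string_view> lines;
    while (!input.empty()) {
        auto eol = input.find_first_of("\r\n");
        if (eol == std::string_view::npos) eol = input.size();
        lines.push_back(input.substr(0, eol));
        input.remove_prefix(eol);
        if (!input.empty() && input[0] == '\r') input.remove_prefix(1);
        if (!input.empty() && input[0] == '\n') input.remove_prefix(1);
    }
    return lines;
}

int main()
{
    // Prints [foo], [bar], [baz], [qux], one per line.
    for (auto line : splitLines("foo\r\nbar\rbaz\nqux"))
        std::cout << '[' << line << "]\n";
}
```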

View file

@ -67,6 +67,48 @@ struct Pos
bool operator==(const Pos & rhs) const = default; bool operator==(const Pos & rhs) const = default;
bool operator!=(const Pos & rhs) const = default; bool operator!=(const Pos & rhs) const = default;
bool operator<(const Pos & rhs) const; bool operator<(const Pos & rhs) const;
struct LinesIterator {
using difference_type = size_t;
using value_type = std::string_view;
using reference = const std::string_view &;
using pointer = const std::string_view *;
using iterator_category = std::input_iterator_tag;
LinesIterator(): pastEnd(true) {}
explicit LinesIterator(std::string_view input): input(input), pastEnd(input.empty()) {
if (!pastEnd)
bump(true);
}
LinesIterator & operator++() {
bump(false);
return *this;
}
LinesIterator operator++(int) {
auto result = *this;
++*this;
return result;
}
reference operator*() const { return curLine; }
pointer operator->() const { return &curLine; }
bool operator!=(const LinesIterator & other) const {
return !(*this == other);
}
bool operator==(const LinesIterator & other) const {
return (pastEnd && other.pastEnd)
|| (std::forward_as_tuple(input.size(), input.data())
== std::forward_as_tuple(other.input.size(), other.input.data()));
}
private:
std::string_view input, curLine;
bool pastEnd = false;
void bump(bool atFirst);
};
}; };
std::ostream & operator<<(std::ostream & str, const Pos & pos); std::ostream & operator<<(std::ostream & str, const Pos & pos);

View file

@ -85,16 +85,20 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path)
std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & path) std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & path)
{ {
static Sync<std::unordered_map<CanonPath, std::optional<struct stat>>> _cache; static Sync<std::unordered_map<Path, std::optional<struct stat>>> _cache;
// Note: we convert std::filesystem::path to Path because the
// former is not hashable on libc++.
Path absPath = makeAbsPath(path);
{ {
auto cache(_cache.lock()); auto cache(_cache.lock());
auto i = cache->find(path); auto i = cache->find(absPath);
if (i != cache->end()) return i->second; if (i != cache->end()) return i->second;
} }
std::optional<struct stat> st{std::in_place}; std::optional<struct stat> st{std::in_place};
if (::lstat(makeAbsPath(path).c_str(), &*st)) { if (::lstat(absPath.c_str(), &*st)) {
if (errno == ENOENT || errno == ENOTDIR) if (errno == ENOENT || errno == ENOTDIR)
st.reset(); st.reset();
else else
@ -103,7 +107,7 @@ std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & pa
auto cache(_cache.lock()); auto cache(_cache.lock());
if (cache->size() >= 16384) cache->clear(); if (cache->size() >= 16384) cache->clear();
cache->emplace(path, st); cache->emplace(absPath, st);
return st; return st;
} }

View file

@ -9,6 +9,10 @@
#include <sodium.h> #include <sodium.h>
#ifdef NDEBUG
#error "Nix may not be built with assertions disabled (i.e. with -DNDEBUG)."
#endif
namespace nix { namespace nix {
void initLibUtil() { void initLibUtil() {

View file

@ -11,6 +11,12 @@ R""(
Note the `file://` - without this, the destination is a chroot Note the `file://` - without this, the destination is a chroot
store, not a binary cache. store, not a binary cache.
* Copy all store paths from a local binary cache in `/tmp/cache` to the local store:
```console
# nix copy --all --from file:///tmp/cache
```
* Copy the entire current NixOS system closure to another machine via * Copy the entire current NixOS system closure to another machine via
SSH: SSH:

View file

@ -120,8 +120,17 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
} }
else { else {
state->forceValueDeep(*v); logger->cout(
logger->cout("%s", ValuePrinter(*state, *v, PrintOptions { .force = true })); "%s",
ValuePrinter(
*state,
*v,
PrintOptions {
.force = true,
.derivationPaths = true
}
)
);
} }
} }
}; };

View file

@ -88,17 +88,19 @@ public:
expectArgs({ expectArgs({
.label="inputs", .label="inputs",
.optional=true, .optional=true,
.handler={[&](std::string inputToUpdate){ .handler={[&](std::vector<std::string> inputsToUpdate){
InputPath inputPath; for (auto inputToUpdate : inputsToUpdate) {
try { InputPath inputPath;
inputPath = flake::parseInputPath(inputToUpdate); try {
} catch (Error & e) { inputPath = flake::parseInputPath(inputToUpdate);
warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate); } catch (Error & e) {
throw e; warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
throw e;
}
if (lockFlags.inputUpdates.contains(inputPath))
warn("Input '%s' was specified multiple times. You may have done this by accident.");
lockFlags.inputUpdates.insert(inputPath);
} }
if (lockFlags.inputUpdates.contains(inputPath))
warn("Input '%s' was specified multiple times. You may have done this by accident.");
lockFlags.inputUpdates.insert(inputPath);
}}, }},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
@ -205,6 +207,9 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
auto lockedFlake = lockFlake(); auto lockedFlake = lockFlake();
auto & flake = lockedFlake.flake; auto & flake = lockedFlake.flake;
// Currently, all flakes are in the Nix store via the rootFS accessor.
auto storePath = store->printStorePath(store->toStorePath(flake.path.path.abs()).first);
if (json) { if (json) {
nlohmann::json j; nlohmann::json j;
if (flake.description) if (flake.description)
@ -214,6 +219,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["resolvedUrl"] = flake.resolvedRef.to_string(); j["resolvedUrl"] = flake.resolvedRef.to_string();
j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs()); j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs());
j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
// "locked" is a misnomer - this is the result of the
// attempt to lock.
j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs()); j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
if (auto rev = flake.lockedRef.input.getRev()) if (auto rev = flake.lockedRef.input.getRev())
j["revision"] = rev->to_string(HashFormat::Base16, false); j["revision"] = rev->to_string(HashFormat::Base16, false);
@ -223,23 +230,24 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["revCount"] = *revCount; j["revCount"] = *revCount;
if (auto lastModified = flake.lockedRef.input.getLastModified()) if (auto lastModified = flake.lockedRef.input.getLastModified())
j["lastModified"] = *lastModified; j["lastModified"] = *lastModified;
j["path"] = store->printStorePath(flake.storePath); j["path"] = storePath;
j["locks"] = lockedFlake.lockFile.toJSON().first; j["locks"] = lockedFlake.lockFile.toJSON().first;
logger->cout("%s", j.dump()); logger->cout("%s", j.dump());
} else { } else {
logger->cout( logger->cout(
ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s", ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s",
flake.resolvedRef.to_string()); flake.resolvedRef.to_string());
logger->cout( if (flake.lockedRef.input.isLocked())
ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s", logger->cout(
flake.lockedRef.to_string()); ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s",
flake.lockedRef.to_string());
if (flake.description) if (flake.description)
logger->cout( logger->cout(
ANSI_BOLD "Description:" ANSI_NORMAL " %s", ANSI_BOLD "Description:" ANSI_NORMAL " %s",
*flake.description); *flake.description);
logger->cout( logger->cout(
ANSI_BOLD "Path:" ANSI_NORMAL " %s", ANSI_BOLD "Path:" ANSI_NORMAL " %s",
store->printStorePath(flake.storePath)); storePath);
if (auto rev = flake.lockedRef.input.getRev()) if (auto rev = flake.lockedRef.input.getRev())
logger->cout( logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s", ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
@ -474,6 +482,8 @@ struct CmdFlakeCheck : FlakeCommand
checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) { checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) {
try { try {
Activity act(*logger, lvlInfo, actUnknown,
fmt("checking Hydra job '%s'", attrPath));
state->forceAttrs(v, pos, ""); state->forceAttrs(v, pos, "");
if (state->isDerivation(v)) if (state->isDerivation(v))
@ -1031,7 +1041,9 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
StorePathSet sources; StorePathSet sources;
sources.insert(flake.flake.storePath); auto storePath = store->toStorePath(flake.flake.path.path.abs()).first;
sources.insert(storePath);
// FIXME: use graph output, handle cycles. // FIXME: use graph output, handle cycles.
std::function<nlohmann::json(const Node & node)> traverse; std::function<nlohmann::json(const Node & node)> traverse;
@ -1043,7 +1055,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
auto storePath = auto storePath =
dryRun dryRun
? (*inputNode)->lockedRef.input.computeStorePath(*store) ? (*inputNode)->lockedRef.input.computeStorePath(*store)
: (*inputNode)->lockedRef.input.fetch(store).first; : (*inputNode)->lockedRef.input.fetchToStore(store).first;
if (json) { if (json) {
auto& jsonObj3 = jsonObj2[inputName]; auto& jsonObj3 = jsonObj2[inputName];
jsonObj3["path"] = store->printStorePath(storePath); jsonObj3["path"] = store->printStorePath(storePath);
@ -1060,7 +1072,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
if (json) { if (json) {
nlohmann::json jsonRoot = { nlohmann::json jsonRoot = {
{"path", store->printStorePath(flake.flake.storePath)}, {"path", store->printStorePath(storePath)},
{"inputs", traverse(*flake.lockFile.root)}, {"inputs", traverse(*flake.lockFile.root)},
}; };
logger->cout("%s", jsonRoot); logger->cout("%s", jsonRoot);

View file

@ -11,9 +11,16 @@ R""(
* Remove all packages: * Remove all packages:
```console ```console
# nix profile remove '.*' # nix profile remove --all
``` ```
* Remove packages by regular expression:
```console
# nix profile remove --regex '.*vim.*'
```
* Remove a package by store path: * Remove a package by store path:
```console ```console

View file

@ -6,7 +6,7 @@ R""(
reference: reference:
```console ```console
# nix profile upgrade '.*' # nix profile upgrade --all
``` ```
* Upgrade a specific package by name: * Upgrade a specific package by name:
@ -15,6 +15,12 @@ R""(
# nix profile upgrade hello # nix profile upgrade hello
``` ```
* Upgrade all packages that include 'vim' in their name:
```console
# nix profile upgrade --regex '.*vim.*'
```
# Description # Description
This command upgrades a previously installed package in a Nix profile, This command upgrades a previously installed package in a Nix profile,

View file

@ -222,6 +222,8 @@ struct ProfileManifest
es[name] = obj; es[name] = obj;
} }
nlohmann::json json; nlohmann::json json;
// Only upgrade with great care as changing it can break fresh installs
// like in https://github.com/NixOS/nix/issues/10109
json["version"] = 3; json["version"] = 3;
json["elements"] = es; json["elements"] = es;
return json; return json;
@ -477,55 +479,151 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
} }
}; };
class MixProfileElementMatchers : virtual Args struct Matcher
{ {
std::vector<std::string> _matchers; virtual ~Matcher() { }
virtual std::string getTitle() = 0;
virtual bool matches(const std::string & name, const ProfileElement & element) = 0;
};
struct RegexMatcher final : public Matcher
{
std::regex regex;
std::string pattern;
RegexMatcher(const std::string & pattern) : regex(pattern, std::regex::extended | std::regex::icase), pattern(pattern)
{ }
std::string getTitle() override
{
return fmt("Regex '%s'", pattern);
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return std::regex_match(element.identifier(), regex);
}
};
struct StorePathMatcher final : public Matcher
{
nix::StorePath storePath;
StorePathMatcher(const nix::StorePath & storePath) : storePath(storePath)
{ }
std::string getTitle() override
{
return fmt("Store path '%s'", storePath.to_string());
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return element.storePaths.count(storePath);
}
};
struct NameMatcher final : public Matcher
{
std::string name;
NameMatcher(const std::string & name) : name(name)
{ }
std::string getTitle() override
{
return fmt("Package name '%s'", name);
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return name == this->name;
}
};
struct AllMatcher final : public Matcher
{
std::string getTitle() override
{
return "--all";
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return true;
}
};
AllMatcher all;
class MixProfileElementMatchers : virtual Args, virtual StoreCommand
{
std::vector<ref<Matcher>> _matchers;
public: public:
MixProfileElementMatchers() MixProfileElementMatchers()
{ {
expectArgs("elements", &_matchers); addFlag({
.longName = "all",
.description = "Match all packages in the profile.",
.handler = {[this]() {
_matchers.push_back(ref<AllMatcher>(std::shared_ptr<AllMatcher>(&all, [](AllMatcher*) {})));
}},
});
addFlag({
.longName = "regex",
.description = "A regular expression to match one or more packages in the profile.",
.labels = {"pattern"},
.handler = {[this](std::string arg) {
_matchers.push_back(make_ref<RegexMatcher>(arg));
}},
});
expectArgs({
.label = "elements",
.optional = true,
.handler = {[this](std::vector<std::string> args) {
for (auto & arg : args) {
if (auto n = string2Int<size_t>(arg)) {
throw Error("'nix profile' no longer supports indices ('%d')", *n);
} else if (getStore()->isStorePath(arg)) {
_matchers.push_back(make_ref<StorePathMatcher>(getStore()->parseStorePath(arg)));
} else {
_matchers.push_back(make_ref<NameMatcher>(arg));
}
}
}}
});
} }
struct RegexPattern { std::set<std::string> getMatchingElementNames(ProfileManifest & manifest) {
std::string pattern; if (_matchers.empty()) {
std::regex reg; throw UsageError("No packages specified.");
};
typedef std::variant<Path, RegexPattern> Matcher;
std::vector<Matcher> getMatchers(ref<Store> store)
{
std::vector<Matcher> res;
for (auto & s : _matchers) {
if (auto n = string2Int<size_t>(s))
throw Error("'nix profile' no longer supports indices ('%d')", *n);
else if (store->isStorePath(s))
res.push_back(s);
else
res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)});
} }
return res; if (std::find_if(_matchers.begin(), _matchers.end(), [](const ref<Matcher> & m) { return m.dynamic_pointer_cast<AllMatcher>(); }) != _matchers.end() && _matchers.size() > 1) {
} throw UsageError("--all cannot be used with package names or regular expressions.");
}
bool matches( if (manifest.elements.empty()) {
const Store & store, warn("There are no packages in the profile.");
const std::string & name, return {};
const ProfileElement & element, }
const std::vector<Matcher> & matchers)
{ std::set<std::string> result;
for (auto & matcher : matchers) { for (auto & matcher : _matchers) {
if (auto path = std::get_if<Path>(&matcher)) { bool foundMatch = false;
if (element.storePaths.count(store.parseStorePath(*path))) return true; for (auto & [name, element] : manifest.elements) {
} else if (auto regex = std::get_if<RegexPattern>(&matcher)) { if (matcher->matches(name, element)) {
if (std::regex_match(name, regex->reg)) result.insert(name);
return true; foundMatch = true;
}
}
if (!foundMatch) {
warn("%s does not match any packages in the profile.", matcher->getTitle());
} }
} }
return result;
return false;
} }
}; };
@ -547,16 +645,19 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
{ {
ProfileManifest oldManifest(*getEvalState(), *profile); ProfileManifest oldManifest(*getEvalState(), *profile);
auto matchers = getMatchers(store); ProfileManifest newManifest = oldManifest;
ProfileManifest newManifest; auto matchingElementNames = getMatchingElementNames(oldManifest);
for (auto & [name, element] : oldManifest.elements) { if (matchingElementNames.empty()) {
if (!matches(*store, name, element, matchers)) { warn ("No packages to remove. Use 'nix profile list' to see the current profile.");
newManifest.elements.insert_or_assign(name, std::move(element)); return;
} else { }
notice("removing '%s'", element.identifier());
} for (auto & name : matchingElementNames) {
auto & element = oldManifest.elements[name];
notice("removing '%s'", element.identifier());
newManifest.elements.erase(name);
} }
auto removedCount = oldManifest.elements.size() - newManifest.elements.size(); auto removedCount = oldManifest.elements.size() - newManifest.elements.size();
@ -564,16 +665,6 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
removedCount, removedCount,
newManifest.elements.size()); newManifest.elements.size());
if (removedCount == 0) {
for (auto matcher: matchers) {
if (const Path * path = std::get_if<Path>(&matcher)) {
warn("'%s' does not match any paths", *path);
} else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)) {
warn("'%s' does not match any packages", regex->pattern);
}
}
warn ("Use 'nix profile list' to see the current profile.");
}
updateProfile(newManifest.build(store)); updateProfile(newManifest.build(store));
} }
}; };
@ -596,20 +687,20 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
{ {
ProfileManifest manifest(*getEvalState(), *profile); ProfileManifest manifest(*getEvalState(), *profile);
auto matchers = getMatchers(store);
Installables installables; Installables installables;
std::vector<ProfileElement *> elems; std::vector<ProfileElement *> elems;
auto matchedCount = 0;
auto upgradedCount = 0; auto upgradedCount = 0;
for (auto & [name, element] : manifest.elements) { auto matchingElementNames = getMatchingElementNames(manifest);
if (!matches(*store, name, element, matchers)) {
continue;
}
matchedCount++; if (matchingElementNames.empty()) {
warn("No packages to upgrade. Use 'nix profile list' to see the current profile.");
return;
}
for (auto & name : matchingElementNames) {
auto & element = manifest.elements[name];
if (!element.source) { if (!element.source) {
warn( warn(
@ -648,7 +739,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
assert(infop); assert(infop);
auto & info = *infop; auto & info = *infop;
if (element.source->lockedRef == info.flake.lockedRef) continue; if (info.flake.lockedRef.input.isLocked()
&& element.source->lockedRef == info.flake.lockedRef)
continue;
printInfo("upgrading '%s' from flake '%s' to '%s'", printInfo("upgrading '%s' from flake '%s' to '%s'",
element.source->attrPath, element.source->lockedRef, info.flake.lockedRef); element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);
@ -665,18 +758,8 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
} }
if (upgradedCount == 0) { if (upgradedCount == 0) {
if (matchedCount == 0) { warn("Found some packages but none of them could be upgraded.");
for (auto & matcher : matchers) { return;
if (const Path * path = std::get_if<Path>(&matcher)) {
warn("'%s' does not match any paths", *path);
} else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)) {
warn("'%s' does not match any packages", regex->pattern);
}
}
} else {
warn("Found some packages but none of them could be upgraded.");
}
warn ("Use 'nix profile list' to see the current profile.");
} }
auto builtPaths = builtPathsPerInstallable( auto builtPaths = builtPathsPerInstallable(
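
The matcher classes introduced above replace the old `Path`/regex `variant`: each command-line selector (`--all`, `--regex`, a store path, or a name) becomes an object that decides whether a profile element matches, and selectors that match nothing produce a warning. A stripped-down sketch of the same dispatch in plain standard C++ (the `Element` type and `matching` function are illustrative stand-ins for `ProfileElement` and `getMatchingElementNames`):

```cpp
#include <iostream>
#include <map>
#include <memory>
#include <regex>
#include <set>
#include <string>
#include <vector>

struct Element { std::string name; };   // hypothetical stand-in for ProfileElement

struct Matcher {
    virtual ~Matcher() = default;
    virtual std::string title() const = 0;
    virtual bool matches(const std::string & name, const Element & e) const = 0;
};

struct AllMatcher : Matcher {
    std::string title() const override { return "--all"; }
    bool matches(const std::string &, const Element &) const override { return true; }
};

struct RegexMatcher : Matcher {
    std::regex re; std::string pattern;
    explicit RegexMatcher(std::string p)
        : re(p, std::regex::extended | std::regex::icase), pattern(std::move(p)) {}
    std::string title() const override { return "Regex '" + pattern + "'"; }
    bool matches(const std::string &, const Element & e) const override {
        return std::regex_match(e.name, re);
    }
};

struct NameMatcher : Matcher {
    std::string name;
    explicit NameMatcher(std::string n) : name(std::move(n)) {}
    std::string title() const override { return "Package name '" + name + "'"; }
    bool matches(const std::string & n, const Element &) const override { return n == name; }
};

// Collect the names of all elements selected by at least one matcher, warning
// about matchers that select nothing.
std::set<std::string> matching(const std::map<std::string, Element> & elements,
                               const std::vector<std::shared_ptr<Matcher>> & matchers)
{
    std::set<std::string> result;
    for (auto & m : matchers) {
        bool found = false;
        for (auto & [name, e] : elements)
            if (m->matches(name, e)) { result.insert(name); found = true; }
        if (!found)
            std::cerr << m->title() << " does not match any packages in the profile.\n";
    }
    return result;
}

int main()
{
    std::map<std::string, Element> profile;
    profile["hello"] = Element{"hello"};
    profile["vim-full"] = Element{"vim-full"};
    auto names = matching(profile, {std::make_shared<RegexMatcher>(".*vim.*")});
    for (auto & n : names)
        std::cout << n << "\n";   // vim-full
}
```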

View file

@ -15,10 +15,10 @@ R""(
user flake:nixpkgs github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a user flake:nixpkgs github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
``` ```
and `nix flake info` will say: and `nix flake metadata` will say:
```console ```console
# nix flake info nixpkgs # nix flake metadata nixpkgs
Resolved URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a Resolved URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
Locked URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a Locked URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a

View file

@ -188,7 +188,9 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand
auto ref = parseFlakeRef(url); auto ref = parseFlakeRef(url);
auto lockedRef = parseFlakeRef(locked); auto lockedRef = parseFlakeRef(locked);
registry->remove(ref.input); registry->remove(ref.input);
auto [tree, resolved] = lockedRef.resolve(store).input.fetch(store); auto resolved = lockedRef.resolve(store).input.getAccessor(store).second;
if (!resolved.isLocked())
warn("flake '%s' is not locked", resolved.to_string());
fetchers::Attrs extraAttrs; fetchers::Attrs extraAttrs;
if (ref.subdir != "") extraAttrs["dir"] = ref.subdir; if (ref.subdir != "") extraAttrs["dir"] = ref.subdir;
registry->add(ref.input, resolved, extraAttrs); registry->add(ref.input, resolved, extraAttrs);

View file

@ -216,6 +216,17 @@ expectStderr() {
return 0 return 0
} }
# Run a command and check whether the stderr matches stdin.
# Show a diff when output does not match.
# Usage:
#
# assertStderr nix profile remove nothing << EOF
# error: This error is expected
# EOF
assertStderr() {
diff -u /dev/stdin <($@ 2>/dev/null 2>&1)
}
needLocalStore() { needLocalStore() {
if [[ "$NIX_REMOTE" == "daemon" ]]; then if [[ "$NIX_REMOTE" == "daemon" ]]; then
skipTest "Cant run through the daemon ($1)" skipTest "Cant run through the daemon ($1)"

View file

@ -41,3 +41,14 @@ mkdir -p $TEST_ROOT/xyzzy $TEST_ROOT/foo
ln -sfn ../xyzzy $TEST_ROOT/foo/bar ln -sfn ../xyzzy $TEST_ROOT/foo/bar
printf 123 > $TEST_ROOT/xyzzy/default.nix printf 123 > $TEST_ROOT/xyzzy/default.nix
[[ $(nix eval --impure --expr "import $TEST_ROOT/foo/bar") = 123 ]] [[ $(nix eval --impure --expr "import $TEST_ROOT/foo/bar") = 123 ]]
# Test --arg-from-file.
[[ "$(nix eval --raw --arg-from-file foo config.nix --expr '{ foo }: { inherit foo; }' foo)" = "$(cat config.nix)" ]]
# Check that special(-ish) files are drained.
if [[ -e /proc/version ]]; then
[[ "$(nix eval --raw --arg-from-file foo /proc/version --expr '{ foo }: { inherit foo; }' foo)" = "$(cat /proc/version)" ]]
fi
# Test --arg-from-stdin.
[[ "$(echo bla | nix eval --raw --arg-from-stdin foo --expr '{ foo }: { inherit foo; }' foo)" = bla ]]

View file

@ -564,6 +564,16 @@ nix flake lock "$flake3Dir"
nix flake update flake2/flake1 --flake "$flake3Dir" nix flake update flake2/flake1 --flake "$flake3Dir"
[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]] [[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
# Test updating multiple inputs.
nix flake lock "$flake3Dir" --override-input flake1 flake1/master/$hash1
nix flake lock "$flake3Dir" --override-input flake2/flake1 flake1/master/$hash1
[[ $(jq -r .nodes.flake1.locked.rev "$flake3Dir/flake.lock") =~ $hash1 ]]
[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash1 ]]
nix flake update flake1 flake2/flake1 --flake "$flake3Dir"
[[ $(jq -r .nodes.flake1.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
[[ $(jq -r .nodes.flake1_2.locked.rev "$flake3Dir/flake.lock") =~ $hash2 ]]
# Test 'nix flake metadata --json'. # Test 'nix flake metadata --json'.
nix flake metadata "$flake3Dir" --json | jq . nix flake metadata "$flake3Dir" --json | jq .

View file

@ -22,7 +22,7 @@ mkdir subdir
pushd subdir pushd subdir
success=("" . .# .#test ../subdir ../subdir#test "$PWD") success=("" . .# .#test ../subdir ../subdir#test "$PWD")
failure=("path:$PWD") failure=("path:$PWD" "../simple.nix")
for i in "${success[@]}"; do for i in "${success[@]}"; do
nix build $i || fail "flake should be found by searching up directories" nix build $i || fail "flake should be found by searching up directories"

View file

@ -56,3 +56,12 @@ echo Run Hello World! > $TEST_ROOT/dummy3/dir/executable
path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3) path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
hash3=$(nix-store -q --hash $path3) hash3=$(nix-store -q --hash $path3)
test "$hash3" = "sha256:08y3nm3mvn9qvskqnf13lfgax5lh73krxz4fcjd5cp202ggpw9nv" test "$hash3" = "sha256:08y3nm3mvn9qvskqnf13lfgax5lh73krxz4fcjd5cp202ggpw9nv"
rm -rf $TEST_ROOT/dummy3
mkdir -p $TEST_ROOT/dummy3
mkdir -p $TEST_ROOT/dummy3/dir
touch $TEST_ROOT/dummy3/dir/file
ln -s './hello/world.txt' $TEST_ROOT/dummy3/dir/symlink
path3=$(nix store add --mode git --hash-algo sha1 $TEST_ROOT/dummy3)
hash3=$(nix-store -q --hash $path3)
test "$hash3" = "sha256:1dwazas8irzpar89s8k2bnp72imfw7kgg4aflhhsfnicg8h428f3"

69
tests/functional/help.sh Normal file
View file

@ -0,0 +1,69 @@
source common.sh
clearStore
# test help output
nix-build --help
nix-shell --help
nix-env --help
nix-env --install --help
nix-env --upgrade --help
nix-env --uninstall --help
nix-env --set --help
nix-env --set-flag --help
nix-env --query --help
nix-env --switch-profile --help
nix-env --list-generations --help
nix-env --delete-generations --help
nix-env --switch-generation --help
nix-env --rollback --help
nix-store --help
nix-store --realise --help
nix-store --serve --help
nix-store --gc --help
nix-store --delete --help
nix-store --query --help
nix-store --add --help
nix-store --add-fixed --help
nix-store --verify --help
nix-store --verify-path --help
nix-store --repair-path --help
nix-store --dump --help
nix-store --restore --help
nix-store --export --help
nix-store --import --help
nix-store --optimise --help
nix-store --read-log --help
nix-store --dump-db --help
nix-store --load-db --help
nix-store --print-env --help
nix-store --generate-binary-cache-key --help
nix-channel --help
nix-collect-garbage --help
nix-copy-closure --help
nix-daemon --help
nix-hash --help
nix-instantiate --help
nix-prefetch-url --help
function subcommands() {
jq -r '
def recurse($prefix):
to_entries[] |
($prefix + [.key]) as $newPrefix |
(if .value | has("commands") then
($newPrefix, (.value.commands | recurse($newPrefix)))
else
$newPrefix
end);
.args.commands | recurse([]) | join(" ")
'
}
nix __dump-cli | subcommands | while IFS= read -r cmd; do
nix $cmd --help
done

View file

@ -0,0 +1,6 @@
error: undefined variable 'invalid'
at /pwd/lang/eval-fail-eol-1.nix:2:1:
1| # foo
2| invalid
| ^
3| # bar

View file

@ -0,0 +1,3 @@
# foo
invalid
# bar

Some files were not shown because too many files have changed in this diff