Mirror of https://github.com/privatevoid-net/nix-super.git (synced 2024-11-22 05:56:15 +02:00)

Commit eb76b35efa: Merge remote-tracking branch 'upstream/master' into store-path-complete-construction

224 changed files with 4699 additions and 3115 deletions
.clang-tidy (new file, 3 lines):

# We use pointers to aggregates in a couple of places, intentionally.
# void * would look weird.
Checks: '-bugprone-sizeof-expression'
.github/workflows/backport.yml (vendored, 2 changed lines):

@@ -21,7 +21,7 @@ jobs:
 fetch-depth: 0
 - name: Create backport PRs
 # should be kept in sync with `version`
-uses: zeebe-io/backport-action@v2.3.0
+uses: zeebe-io/backport-action@v2.4.1
 with:
 # Config README: https://github.com/zeebe-io/backport-action#backport-action
 github_token: ${{ secrets.GITHUB_TOKEN }}
.gitignore (vendored, 1 changed line):

@@ -94,6 +94,7 @@ perl/Makefile.config
 /tests/functional/ca/config.nix
 /tests/functional/dyn-drv/config.nix
 /tests/functional/repl-result-out
+/tests/functional/debugger-test-out
 /tests/functional/test-libstoreconsumer/test-libstoreconsumer

 # /tests/functional/lang/
.version (2 changed lines):

@@ -1 +1 @@
-2.20.0
+2.21.0
Changed file (name not shown in this extract):

@@ -63,7 +63,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
 - Functional tests – [`tests/functional/**.sh`](./tests/functional)
 - Unit tests – [`src/*/tests`](./src/)
 - Integration tests – [`tests/nixos/*`](./tests/nixos)
-- [ ] User documentation in the [manual](..doc/manual/src)
+- [ ] User documentation in the [manual](./doc/manual/src)
 - [ ] API documentation in header files
 - [ ] Code and comments are self-explanatory
 - [ ] Commit message explains **why** the change was made
Makefile (25 changed lines):

@@ -47,6 +47,17 @@ makefiles += \
 tests/functional/plugins/local.mk
 endif

+# Some makefiles require access to built programs and must be included late.
+makefiles-late =
+
+ifeq ($(ENABLE_DOC_GEN), yes)
+makefiles-late += doc/manual/local.mk
+endif
+
+ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
+makefiles-late += doc/internal-api/local.mk
+endif
+
 # Miscellaneous global Flags

 OPTIMIZE = 1

@@ -95,24 +106,16 @@ installcheck:
 @exit 1
 endif

-# Documentation or else fallback stub rules.
-#
-# The documentation makefiles be included after `mk/lib.mk` so rules
-# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like
-# variables, unfortunately.
+# Documentation fallback stub rules.

-ifeq ($(ENABLE_DOC_GEN), yes)
-$(eval $(call include-sub-makefile, doc/manual/local.mk))
-else
+ifneq ($(ENABLE_DOC_GEN), yes)
 .PHONY: manual-html manpages
 manual-html manpages:
 @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'."
 @exit 1
 endif

-ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
-$(eval $(call include-sub-makefile, doc/internal-api/local.mk))
-else
+ifneq ($(ENABLE_INTERNAL_API_DOCS), yes)
 .PHONY: internal-api-html
 internal-api-html:
 @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
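Not part of the commit: a hedged sketch of how the two documentation switches referenced by the stub rules above are typically exercised, using only the flags and targets named in the echo messages.

```console
# Manual and manpages (enabled unless configured with --disable-doc-gen):
$ make manual-html manpages

# Internal API docs (require configuring with --enable-internal-api-docs):
$ ./configure --enable-internal-api-docs
$ make internal-api-html
```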
Changed file (name not shown in this extract):

@@ -47,6 +47,10 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])
 # State should be stored in /nix/var, unless the user overrides it explicitly.
 test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var

+# Assign a default value to C{,XX}FLAGS as the default configure script sets them
+# to -O2 otherwise, which we don't want to have hardcoded
+CFLAGS=${CFLAGS-""}
+CXXFLAGS=${CXXFLAGS-""}

 AC_PROG_CC
 AC_PROG_CXX

@@ -351,7 +355,7 @@ fi
 AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[

 # Look for gtest.
-PKG_CHECK_MODULES([GTEST], [gtest_main])
+PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main])

 # Look for rapidcheck.
 PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest])
Changed file (name not shown in this extract):

@@ -6,6 +6,8 @@ additional-css = ["custom.css"]
 additional-js = ["redirects.js"]
 edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
 git-repository-url = "https://github.com/NixOS/nix"
+fold.enable = true
+fold.level = 1

 [preprocessor.anchors]
 renderers = ["html"]
Changed file (name not shown in this extract):

@@ -93,9 +93,6 @@ let

 maybeProse =
 # FIXME: this is a horrible hack to keep `nix help-stores` working.
-# the correct answer to this is to remove that command and replace it
-# by statically generated manpages or the output of something like `nix
-# store info <store type>`.
 let
 help-stores = ''
 ${index}

@@ -121,7 +118,7 @@ let
 };
 in
 optionalString (details ? doc) (
-if match "@store-types@" details.doc != [ ]
+if match ".*@store-types@.*" details.doc != null
 then help-stores
 else details.doc
 );
Deleted file (name not shown in this extract, 7 lines removed):

---
synopsis: Option `allowed-uris` can now match whole schemes in URIs without slashes
prs: 9547
---

If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed.
Previously this only worked for schemes whose URIs used the `://` syntax.
doc/manual/rl-next/better-errors-in-nix-repl.md (new file, 40 lines):

---
synopsis: Concise error printing in `nix repl`
prs: 9928
---

Previously, if an element of a list or attribute set threw an error while
evaluating, `nix repl` would print the entire error (including source location
information) inline. This output was clumsy and difficult to parse:

```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error:
       … while calling the 'throw' builtin
         at «string»:1:9:
            1| { err = builtins.throw "uh oh!"; }
             |         ^

       error: uh oh!»; }
```

Now, only the error message is displayed, making the output much more readable.

```
nix-repl> { err = builtins.throw "uh oh!"; }
{ err = «error: uh oh!»; }
```

However, if the whole expression being evaluated throws an error, source
locations and (if applicable) a stack trace are printed, just like you'd expect:

```
nix-repl> builtins.throw "uh oh!"
error:
       … while calling the 'throw' builtin
         at «string»:1:1:
            1| builtins.throw "uh oh!"
             | ^

       error: uh oh!
```
Deleted file (name not shown in this extract, 8 lines removed):

---
synopsis: Include cgroup stats when building through the daemon
prs: 9598
---

Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng,
if both sides of the connection are this version of Nix or newer.
New file (name not shown in this extract, 9 lines):

---
synopsis: "`--debugger` can now access bindings from `let` expressions"
prs: 9918
issues: 8827.
---

Breakpoints and errors in the bindings of a `let` expression can now access
those bindings in the debugger. Previously, only the body of `let` expressions
could access those bindings.
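Not part of the commit: an illustrative, abridged session sketching what the note describes. It assumes `builtins.break` and the `--debugger` flag behave as shown in the other debugger notes in this changeset; the values are made up and the output is elided.

```console
$ nix eval --debugger --expr 'let x = 123; y = builtins.break x; in y'
info: breakpoint reached
...
nix-repl> x    # the `let` binding is now visible from the breakpoint
123
```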
Deleted file (name not shown in this extract, 6 lines removed):

---
synopsis: Fix handling of truncated `.drv` files.
prs: 9673
---

Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing.
New file (name not shown in this extract, 25 lines):

---
synopsis: The `--debugger` will start more reliably in `let` expressions and function calls
prs: 9917
issues: 6649
---

Previously, if you attempted to evaluate this file with the debugger:

```nix
let
  a = builtins.trace "before inner break" (
    builtins.break "hello"
  );
  b = builtins.trace "before outer break" (
    builtins.break a
  );
in
  b
```

Nix would correctly enter the debugger at `builtins.break a`, but if you asked
it to `:continue`, it would skip over the `builtins.break "hello"` expression
entirely.

Now, Nix will correctly enter the debugger at both breakpoints.
Deleted file (name not shown in this extract, 7 lines removed):

---
synopsis: Reduce eval memory usage and wall time
prs: 9658
---

Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines.
This reduces memory usage during eval by around 2% and wall time by around 3%.
Deleted file (name not shown in this extract, 12 lines removed):

---
synopsis: Add new `eval-system` setting
prs: 4093
---

Add a new `eval-system` option.
Unlike `system`, it just overrides the value of `builtins.currentSystem`.
This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system.
In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense.

`eval-system` only takes effect if it is non-empty.
If empty (the default) `system` is used as before, so there is no breakage.
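Not part of the commit: a rough sketch of the setting's observable effect, assuming it can be passed as a command-line flag like other settings; the system values shown are examples.

```console
$ nix eval --impure --expr builtins.currentSystem
"x86_64-linux"
$ nix eval --impure --expr builtins.currentSystem --eval-system aarch64-linux
"aarch64-linux"
```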
Deleted file (name not shown in this extract, 18 lines removed):

---
synopsis: "Nix now uses `libgit2` for Git fetching"
prs:
- 9240
- 9241
- 9258
- 9480
issues:
- 5313
---

Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox.
The existing implementation based on the Git CLI had issues regarding reproducibility and performance.

Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control.

Known issues:
- The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window.
Deleted file (name not shown in this extract, 23 lines removed):

---
synopsis: Rename hash format `base32` to `nix32`
prs: 9452
---

Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for
[Base32](https://en.wikipedia.org/wiki/Base32).

## Deprecation: Use `nix32` instead of `base32` as `toHashFormat`

For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from`
parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value
remains as a deprecated alias for `"base32"`. Please convert your code from:

```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";}
```

to

```nix
builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";}
```
Deleted file (name not shown in this extract, 8 lines removed):

---
synopsis: import-from-derivation builds the derivation in the build store
prs: 9661
---

When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option.

Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures.
doc/manual/rl-next/leading-period.md (new file, 10 lines):

---
synopsis: Store paths are allowed to start with `.`
issues: 912
prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224
---

Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.

Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
Deleted file (name not shown in this extract, 8 lines removed):

---
synopsis: Mounted SSH Store
issues: 7890
prs: 7912
---

Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
Deleted file (name not shown in this extract, 7 lines removed):

---
synopsis: Rename to `nix config show`
issues: 7672
prs: 9477
---

`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface.
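Not part of the commit: the renamed commands as they would now be invoked.

```console
# formerly `nix show-config`:
$ nix config show
# formerly `nix doctor`:
$ nix config check
```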
Deleted file (name not shown in this extract, 6 lines removed):

---
synopsis: Fix `nix-env --query --drv-path --json`
prs: 9257
---

Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set.
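Not part of the commit: a sketch of an invocation exercising the fixed flag combination (output omitted).

```console
# With the fix, the JSON output also carries the derivation path for each package:
$ nix-env --query --installed --json --drv-path
```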
Deleted file (name not shown in this extract, 47 lines removed):

---
synopsis: Add `nix hash convert`
prs: 9452
---

New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track
to stabilization! Examples:

- Convert the hash to `nix32`.

  ```bash
  $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  vw46m23bizj4n8afrc0fj19wrp7mj3c0
  ```
  `nix32` is a base32 encoding with a nix-specific character set.
  Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input
  hash.
- Convert the hash to the `sri` format that includes an algorithm specification:
  ```bash
  nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
  ```
  or with an explicit `-to` format:
  ```bash
  nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df"
  sha1-gA1Zz808BekAy04hS+SPa4hqCN8=
  ```
- Assert the input format of the hash:
  ```bash
  nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
  error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32'
  nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s"
  sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
  ```

The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion.

## Related Deprecations

The following commands are still available but will emit a deprecation warning. Please convert your code to
`nix hash convert`:

- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
- `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2`
  or even just `nix hash convert $hash1 $hash2` instead.
Deleted file (name not shown in this extract, 8 lines removed):

---
synopsis: "`nix profile` now allows referring to elements by human-readable name"
prs: 8678
---

[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed.

**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix.
doc/manual/rl-next/reduce-debugger-clutter.md
Normal file
37
doc/manual/rl-next/reduce-debugger-clutter.md
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
---
|
||||||
|
synopsis: "Visual clutter in `--debugger` is reduced"
|
||||||
|
prs: 9919
|
||||||
|
---
|
||||||
|
|
||||||
|
Before:
|
||||||
|
```
|
||||||
|
info: breakpoint reached
|
||||||
|
|
||||||
|
|
||||||
|
Starting REPL to allow you to inspect the current state of the evaluator.
|
||||||
|
|
||||||
|
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
|
||||||
|
|
||||||
|
nix-repl> :continue
|
||||||
|
error: uh oh
|
||||||
|
|
||||||
|
|
||||||
|
Starting REPL to allow you to inspect the current state of the evaluator.
|
||||||
|
|
||||||
|
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
|
||||||
|
|
||||||
|
nix-repl>
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
info: breakpoint reached
|
||||||
|
|
||||||
|
Nix 2.20.0pre20231222_dirty debugger
|
||||||
|
Type :? for help.
|
||||||
|
nix-repl> :continue
|
||||||
|
error: uh oh
|
||||||
|
|
||||||
|
nix-repl>
|
||||||
|
```
|
|
Deleted file (name not shown in this extract, 42 lines removed):

---
synopsis: Source locations are printed more consistently in errors
issues: 561
prs: 9555
---

Source location information is now included in error messages more
consistently. Given this code:

```nix
let
  attr = {foo = "bar";};
  key = {};
in
  attr.${key}
```

Previously, Nix would show this unhelpful message when attempting to evaluate
it:

```
error:
       … while evaluating an attribute name

       error: value is a set while a string was expected
```

Now, the error message displays where the problematic value was found:

```
error:
       … while evaluating an attribute name

         at bad.nix:4:11:

            3|   key = {};
            4| in attr.${key}
             |           ^
            5|

       error: value is a set while a string was expected
```
Deleted file (name not shown in this extract, 32 lines removed):

---
synopsis: Some stack overflow segfaults are fixed
issues: 9616
prs: 9617
---

The number of nested function calls has been restricted, to detect and report
infinite function call recursions. The default maximum call depth is 10,000 and
can be set with [the `max-call-depth`
option](@docroot@/command-ref/conf-file.md#conf-max-call-depth).

This fixes segfaults or the following unhelpful error message in many cases:

    error: stack overflow (possible infinite recursion)

Before:

```
$ nix-instantiate --eval --expr '(x: x x) (x: x x)'
Segmentation fault: 11
```

After:

```
$ nix-instantiate --eval --expr '(x: x x) (x: x x)'
error: stack overflow

       at «string»:1:14:
            1| (x: x x) (x: x x)
             |              ^
```
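Not part of the commit: since `max-call-depth` is an ordinary setting, it should be adjustable per invocation; the expression and limit below are only a stand-in to make a terminating recursion trip the check.

```console
# Lower the limit (value arbitrary) so the recursion is reported as an error
# instead of crashing; expect a stack-overflow error pointing at the call site.
$ nix-instantiate --eval --expr 'let f = n: if n == 0 then 0 else f (n - 1); in f 1000' --option max-call-depth 100
```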
Deleted file (name not shown in this extract, 31 lines removed):

---
synopsis: Better error reporting for `with` expressions
prs: 9658
---

`with` expressions using non-attrset values to resolve variables are now reported with proper positions.

Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated:

```
nix-repl> with 1; a
error:
       … <borked>

         at «none»:0: (source not available)

       error: value is an integer while a set was expected
```

Now position information is preserved and reported as with most other errors:

```
nix-repl> with 1; a
error:
       … while evaluating the first subexpression of a with expression
         at «string»:1:1:
            1| with 1; a
             | ^

       error: value is an integer while a set was expected
```
Changed file (name not shown in this extract):

@@ -104,7 +104,10 @@
 - [Channels](command-ref/files/channels.md)
 - [Default Nix expression](command-ref/files/default-nix-expression.md)
 - [Architecture and Design](architecture/architecture.md)
-- [Protocols](protocols/index.md)
+- [Formats and Protocols](protocols/index.md)
+- [JSON Formats](protocols/json/index.md)
+- [Store Object Info](protocols/json/store-object-info.md)
+- [Derivation](protocols/json/derivation.md)
 - [Serving Tarball Flakes](protocols/tarball-fetcher.md)
 - [Store Path Specification](protocols/store-path.md)
 - [Derivation "ATerm" file format](protocols/derivation-aterm.md)

@@ -118,6 +121,7 @@
 - [C++ style guide](contributing/cxx.md)
 - [Release Notes](release-notes/index.md)
 {{#include ./SUMMARY-rl-next.md}}
+- [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md)
 - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md)
 - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
 - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md)
Changed file (name not shown in this extract):

@@ -36,5 +36,6 @@
 /package-management/s3-substituter /store/types/s3-binary-cache-store 301!

 /protocols/protocols /protocols 301!
+/json/* /protocols/json/:splat 301!

 /release-notes/release-notes /release-notes 301!
Changed file (name not shown in this extract):

@@ -51,7 +51,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto
 - <span id="opt-delete-old">[`--delete-old`](#opt-delete-old)</span> / `-d`\
   Delete all old generations of profiles.

-  This is the equivalent of invoking `nix-env --delete-generations old` on each found profile.
+  This is the equivalent of invoking [`nix-env --delete-generations old`](@docroot@/command-ref/nix-env/delete-generations.md#generations-old) on each found profile.

 - <span id="opt-delete-older-than">[`--delete-older-than`](#opt-delete-older-than)</span> *period*\
   Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time).
Changed file (name not shown in this extract):

@@ -12,13 +12,13 @@ This operation deletes the specified generations of the current profile.

 *generations* can be a one of the following:

-- <span id="generations-list">`<number>...`</span>:\
+- <span id="generations-list">[`<number>...`](#generations-list)</span>:\
   A list of generation numbers, each one a separate command-line argument.

   Delete exactly the profile generations given by their generation number.
   Deleting the current generation is not allowed.

-- The special value <span id="generations-old">`old`</span>
+- <span id="generations-old">[The special value `old`](#generations-old)</span>

   Delete all generations except the current one.

@@ -30,7 +30,7 @@ This operation deletes the specified generations of the current profile.
 > Because one can roll back to a previous generation, it is possible to have generations newer than the current one.
 > They will also be deleted.

-- <span id="generations-time">`<number>d`</span>:\
+- <span id="generations-time">[`<number>d`](#generations-time)</span>:\
   The last *number* days

   *Example*: `30d`

@@ -38,7 +38,7 @@ This operation deletes the specified generations of the current profile.
   Delete all generations created more than *number* days ago, except the most recent one of them.
   This allows rolling back to generations that were available within the specified period.

-- <span id="generations-count">`+<number>`</span>:\
+- <span id="generations-count">[`+<number>`](#generations-count)</span>:\
   The last *number* generations up to the present

   *Example*: `+5`
Changed file (name not shown in this extract):

@@ -304,7 +304,6 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master
 ### Build process

 Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
-Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.

 ## Branches
Changed file (name not shown in this extract):

@@ -127,7 +127,7 @@
 non-[fixed-output](#gloss-fixed-output-derivation)
 derivation.

-- [output-addressed store object]{#gloss-output-addressed-store-object}
+- [content-addressed store object]{#gloss-content-addressed-store-object}

   A [store object] whose [store path] is determined by its contents.
   This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation).

@@ -156,6 +156,11 @@
 builder can rely on external inputs such as the network or the
 system time) but the Nix model assumes it.

+- [impure derivation]{#gloss-impure-derivation}
+
+  [An experimental feature](#@docroot@/contributing/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
+  so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them.
+
 - [Nix database]{#gloss-nix-database}

   An SQlite database to track [reference]s between [store object]s.

@@ -167,12 +172,13 @@

 - [Nix expression]{#gloss-nix-expression}

-  A high-level description of software packages and compositions
+  1. Commonly, a high-level description of software packages and compositions
   thereof. Deploying software using Nix entails writing Nix
   expressions for your packages. Nix expressions specify [derivations][derivation],
   which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation].
-  These derivations can then be [realised][realise] to produce
-  [outputs][output].
+  These derivations can then be [realised][realise] to produce [outputs][output].

+  2. A syntactically valid use of the [Nix language]. For example, the contents of a `.nix` file form an expression.
+
 - [reference]{#gloss-reference}

@@ -279,7 +285,7 @@

 - [package attribute set]{#package-attribute-set}

-  An [attribute set] containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as
+  An [attribute set](@docroot@/language/values.md#attribute-set) containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as
   - attributes that refer to the files of a [package], typically in the form of [derivation outputs](#output),
   - attributes that declare something about how the package is supposed to be installed or used,
   - other metadata or arbitrary attributes.

@@ -302,3 +308,6 @@
 These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.

 See the contribution guide on the [purpose and lifecycle of experimental feaures](@docroot@/contributing/experimental-features.md).
+
+
+[Nix language]: ./language/index.md
Changed file (name not shown in this extract):

@@ -32,11 +32,15 @@
 your distribution does not provide it, please install it from
 <http://www.sqlite.org/>.

-- The [Boehm garbage collector](http://www.hboehm.info/gc/) to reduce
-  the evaluator’s memory consumption (optional). To enable it, install
+- The [Boehm garbage collector (`bdw-gc`)](http://www.hboehm.info/gc/) to reduce
+  the evaluator’s memory consumption (optional).
+
+  To enable it, install
   `pkgconfig` and the Boehm garbage collector, and pass the flag
   `--enable-gc` to `configure`.

+  For `bdw-gc` <= 8.2.4 Nix needs a [small patch](https://github.com/NixOS/nix/blob/ac4d2e7b857acdfeac35ac8a592bdecee2d29838/boehmgc-traceable_allocator-public.diff) to be applied.
+
 - The `boost` library of version 1.66.0 or higher. It can be obtained
   from the official web site <https://www.boost.org/>.
Changed file (name not shown in this extract):

@@ -2,48 +2,39 @@

 > **Note**
 >
-> These upgrade instructions apply for regular Linux distributions where Nix was installed following the [installation instructions in this manual](./index.md).
+> These upgrade instructions apply where Nix was installed following the [installation instructions in this manual](./index.md).

-First, find the name of the current [channel](@docroot@/command-ref/nix-channel.md) through which Nix is distributed:
+Check which Nix version will be installed, for example from one of the [release channels](http://channels.nixos.org/) such as `nixpkgs-unstable`:

 ```console
-$ nix-channel --list
-```
-
-By default this should return an entry for Nixpkgs:
-
-```console
-nixpkgs https://nixos.org/channels/nixpkgs-23.05
-```
-
-Check which Nix version will be installed:
-
-```console
-$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-23.11 --run "nix --version"
+$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-unstable --run "nix --version"
 nix (Nix) 2.18.1
 ```

 > **Warning**
 >
-> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with `nix-build` or `nix-store --realise`, may change the database schema!
+> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema!
 > Reverting to an older version of Nix may therefore require purging the store database before it can be used.

-Update the channel entry:
+## Linux multi-user

 ```console
-$ nix-channel --remove nixpkgs
-$ nix-channel --add https://nixos.org/channels/nixpkgs-23.11 nixpkgs
+$ sudo su
+# nix-env --install --file '<nixpkgs>' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable
+# systemctl daemon-reload
+# systemctl restart nix-daemon
 ```

-Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c
-'nix-channel --update &&
-nix-env --install --attr nixpkgs.nix &&
-launchctl remove org.nixos.nix-daemon &&
-launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'`
+## macOS multi-user

-Single-user installations of Nix should run this: `nix-channel --update;
-nix-env --install --attr nixpkgs.nix nixpkgs.cacert`
+```console
+$ sudo nix-env --install --file '<nixpkgs>' --attr nix -I nixpkgs=channel:nixpkgs-unstable
+$ sudo launchctl remove org.nixos.nix-daemon
+$ sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist
+```

-Multi-user Nix users on Linux should run this with sudo: `nix-channel
---update; nix-env --install --attr nixpkgs.nix nixpkgs.cacert; systemctl
-daemon-reload; systemctl restart nix-daemon`
+## Single-user all platforms
+
+```console
+$ nix-env --install --file '<nixpkgs>' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable
+```
Changed file (name not shown in this extract):

@@ -1,6 +1,8 @@
 # Import From Derivation

-The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object).
+The value of a Nix expression can depend on the contents of a [store object].
+
+[store object]: @docroot@/glossary.md#gloss-store-object

 Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD):
Changed file (name not shown in this extract):

@@ -84,7 +84,7 @@ The `+` operator is overloaded to also work on strings and paths.
 >
 > *string* `+` *string*

-Concatenate two [string]s and merge their string contexts.
+Concatenate two [strings][string] and merge their string contexts.

 [String concatenation]: #string-concatenation

@@ -94,7 +94,7 @@ Concatenate two [string]s and merge their string contexts.
 >
 > *path* `+` *path*

-Concatenate two [path]s.
+Concatenate two [paths][path].
 The result is a path.

 [Path concatenation]: #path-concatenation

@@ -150,9 +150,9 @@ If an attribute name is present in both, the attribute value from the latter is

 Comparison is

-- [arithmetic] for [number]s
-- lexicographic for [string]s and [path]s
-- item-wise lexicographic for [list]s:
+- [arithmetic] for [numbers][number]
+- lexicographic for [strings][string] and [paths][path]
+- item-wise lexicographic for [lists][list]:
   elements at the same index in both lists are compared according to their type and skipped if they are equal.

 All comparison operators are implemented in terms of `<`, and the following equivalencies hold:

@@ -163,12 +163,12 @@ All comparison operators are implemented in terms of `<`, and the following equi
 | *a* `>` *b* | *b* `<` *a* |
 | *a* `>=` *b* | `! (` *a* `<` *b* `)` |

-[Comparison]: #comparison-operators
+[Comparison]: #comparison

 ## Equality

-- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated.
-- Comparison of [function]s always returns `false`.
+- [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated.
+- Comparison of [functions][function] always returns `false`.
 - Numbers are type-compatible, see [arithmetic] operators.
 - Floating point numbers only differ up to a limited precision.
Changed file (name not shown in this extract):

@@ -20,6 +20,8 @@ Rather than writing

 (where `freetype` is a [derivation]), you can instead write

+[derivation]: ../glossary.md#gloss-derivation
+
 ```nix
 "--with-freetype2-library=${freetype}/lib"
 ```

@@ -189,7 +191,7 @@ If neither is present, an error is thrown.
 > "${a}"
 > ```
 >
-> error: cannot coerce a set to a string
+> error: cannot coerce a set to a string: { }
 >
 > at «string»:4:2:
 >
doc/manual/src/protocols/json/derivation.md (new file, 71 lines):

# Derivation JSON Format

> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command)
> and subject to change.

The JSON serialization of a
[derivations](@docroot@/glossary.md#gloss-store-derivation)
is a JSON object with the following fields:

* `name`:
  The name of the derivation.
  This is used when calculating the store paths of the derivation's outputs.

* `outputs`:
  Information about the output paths of the derivation.
  This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields:

  * `path`: The output path.

  * `hashAlgo`:
    For fixed-output derivations, the hashing algorithm (e.g. `sha256`), optionally prefixed by `r:` if `hash` denotes a NAR hash rather than a flat file hash.

  * `hash`:
    For fixed-output derivations, the expected content hash in base-16.

  > **Example**
  >
  > ```json
  > "outputs": {
  >   "out": {
  >     "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source",
  >     "hashAlgo": "r:sha256",
  >     "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
  >   }
  > }
  > ```

* `inputSrcs`:
  A list of store paths on which this derivation depends.

* `inputDrvs`:
  A JSON object specifying the derivations on which this derivation depends, and what outputs of those derivations.

  > **Example**
  >
  > ```json
  > "inputDrvs": {
  >   "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
  >   "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
  > }
  > ```

  specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.

* `system`:
  The system type on which this derivation is to be built
  (e.g. `x86_64-linux`).

* `builder`:
  The absolute path of the program to be executed to run the build.
  Typically this is the `bash` shell
  (e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).

* `args`:
  The command-line arguments passed to the `builder`.

* `env`:
  The environment passed to the `builder`.
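Not part of the commit: one way to see this serialization in practice is the experimental `nix derivation show` command. The installable, store paths and elided values below are placeholders only.

```console
$ nix derivation show nixpkgs#hello
{
  "/nix/store/…-hello.drv": {
    "name": "hello",
    "outputs": { "out": { "path": "/nix/store/…-hello" } },
    "inputSrcs": [ … ],
    "inputDrvs": { … },
    "system": "x86_64-linux",
    "builder": "/nix/store/…-bash/bin/bash",
    "args": [ … ],
    "env": { … }
  }
}
```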
doc/manual/src/protocols/json/store-object-info.md (new file, 98 lines):

# Store object info JSON format

> **Warning**
>
> This JSON format is currently
> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command)
> and subject to change.

Info about a [store object].

* `path`:

  [Store path][store path] to the given store object.

* `narHash`:

  Hash of the [file system object] part of the store object when serialized as a [Nix Archive].

* `narSize`:

  Size of the [file system object] part of the store object when serialized as a [Nix Archive].

* `references`:

  An array of [store paths][store path], possibly including this one.

* `ca` (optional):

  Content address of this store object's file system object, used to compute its store path.

[store path]: @docroot@/glossary.md#gloss-store-path
[file system object]: @docroot@/store/file-system-object.md
[Nix Archive]: @docroot@/glossary.md#gloss-nar

## Impure fields

These are not intrinsic properties of the store object.
In other words, the same store object residing in different store could have different values for these properties.

* `deriver` (optional):

  The path to the [derivation] from which this store object is produced.

  [derivation]: @docroot@/glossary.md#gloss-store-derivation

* `registrationTime` (optional):

  When this derivation was added to the store.

* `ultimate` (optional):

  Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.

* `signatures` (optional):

  Signatures claiming that this store object is what it claims to be.
  Not relevant for [content-addressed] store objects,
  but useful for [input-addressed] store objects.

  [content-addressed]: @docroot@/glossary.md#gloss-content-addressed-store-object
  [input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object

### `.narinfo` extra fields

This meta data is specific to the "binary cache" family of Nix store types.
This information is not intrinsic to the store object, but about how it is stored.

* `url`:

  Where to download a compressed archive of the file system objects of this store object.

* `compression`:

  The compression format that the archive is in.

* `fileHash`:

  A digest for the compressed archive itself, as opposed to the data contained within.

* `fileSize`:

  The size of the compressed archive itself.

## Computed closure fields

These fields are not stored at all, but computed by traverising the other other fields across all the store objects in a [closure].

* `closureSize`:

  The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure].

### `.narinfo` extra fields

* `closureSize`:

  The total size of this store object and every other object in its [closure].

[closure]: @docroot@/glossary.md#gloss-closure
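Not part of the commit: `nix path-info --json` is one producer of this format. The store path and values below are placeholders, and the output shape is abridged.

```console
$ nix path-info --json /nix/store/…-hello
[
  {
    "path": "/nix/store/…-hello",
    "narHash": "sha256-…",
    "narSize": 226560,
    "references": [ … ],
    "registrationTime": 1700000000,
    "ultimate": true
  }
]
```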
169
doc/manual/src/release-notes/rl-2.20.md
Normal file
169
doc/manual/src/release-notes/rl-2.20.md
Normal file
|
@ -0,0 +1,169 @@
|
||||||
|
# Release 2.20.0 (2024-01-29)
|
||||||
|
|
||||||
|
- Option `allowed-uris` can now match whole schemes in URIs without slashes [#9547](https://github.com/NixOS/nix/pull/9547)
|
||||||
|
|
||||||
|
If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed.
|
||||||
|
Previously this only worked for schemes whose URIs used the `://` syntax.
|
||||||
|
|
||||||
|
- Include cgroup stats when building through the daemon [#9598](https://github.com/NixOS/nix/pull/9598)
|
||||||
|
|
||||||
|
Nix now also reports cgroup statistics when building through the Nix daemon and when doing remote builds using `ssh-ng`,
|
||||||
|
if both sides of the connection are using Nix 2.20 or newer.
|
||||||
|
|
||||||
|
- Disallow empty search regex in `nix search` [#9481](https://github.com/NixOS/nix/pull/9481)
|
||||||
|
|
||||||
|
[`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`.
|
||||||
|
|
||||||
|
- Add new `eval-system` setting [#4093](https://github.com/NixOS/nix/pull/4093)
|
||||||
|
|
||||||
|
Add a new `eval-system` option.
|
||||||
|
Unlike `system`, it just overrides the value of `builtins.currentSystem`.
|
||||||
|
This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system.
|
||||||
|
In contrast, `system` also affects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense.
|
||||||
|
|
||||||
|
`eval-system` only takes effect if it is non-empty.
|
||||||
|
If empty (the default) `system` is used as before, so there is no breakage.
|
||||||
|
|
||||||
|
- Import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661)
|
||||||
|
|
||||||
|
When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option.
|
||||||
|
|
||||||
|
Because the resulting Nix expression must be copied back to the evaluation store in order to be imported, this requires the evaluation store to trust the build store's signatures.
|
||||||
|
|
||||||
|
- Mounted SSH Store [#7890](https://github.com/NixOS/nix/issues/7890) [#7912](https://github.com/NixOS/nix/pull/7912)
|
||||||
|
|
||||||
|
Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
|
||||||
|
This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
|
||||||
|
|
||||||
|
- Rename `nix show-config` to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477)
|
||||||
|
|
||||||
|
`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command line interface.
|
||||||
|
|
||||||
|
- Add command `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452)
|
||||||
|
|
||||||
|
This replaces the old `nix hash to-*` commands, which are still available but will emit a deprecation warning. Please convert as follows:
|
||||||
|
|
||||||
|
- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
|
||||||
|
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
|
||||||
|
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
|
||||||
|
- `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` or even just `nix hash convert $hash1 $hash2` instead.
|
||||||
|
|
||||||
|
- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452)
|
||||||
|
|
||||||
|
Hash format `base32` was renamed to `nix32` since it used a special Nix-specific character set for
|
||||||
|
[Base32](https://en.wikipedia.org/wiki/Base32).
|
||||||
|
|
||||||
|
- `nix profile` now allows referring to elements by human-readable names [#8678](https://github.com/NixOS/nix/pull/8678)
|
||||||
|
|
||||||
|
[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed.
|
||||||
|
|
||||||
|
**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix.
|
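An illustrative session (`hello` stands for whatever name `nix profile list` reports for the element):

```console
$ nix profile list           # each element now shows a stable name
$ nix profile upgrade hello  # previously: nix profile upgrade 0
$ nix profile remove hello   # previously: nix profile remove 0
```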
||||||
|
|
||||||
|
- Give `nix store add` a `--hash-algo` flag [#9809](https://github.com/NixOS/nix/pull/9809)
|
||||||
|
|
||||||
|
Adds a missing feature that was present in the old CLI, and matches our
|
||||||
|
plans to have similar flags for `nix hash convert` and `nix hash path`.
|
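A minimal sketch (`./some-file` is a placeholder path):

```console
# Add a file to the store, hashing it with SHA-512 instead of the default SHA-256.
$ nix store add --hash-algo sha512 ./some-file
```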
||||||
|
|
||||||
|
- Coercion errors include the failing value
|
||||||
|
|
||||||
|
The `error: cannot coerce a <TYPE> to a string` message now includes the value
|
||||||
|
which caused the error.
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: cannot coerce a set to a string
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: cannot coerce a set to a string: { aesSupport = «thunk»;
|
||||||
|
avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»;
|
||||||
|
canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion
|
||||||
|
= «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84
|
||||||
|
attributes elided»}
|
||||||
|
```
|
||||||
|
|
||||||
|
- Type errors include the failing value
|
||||||
|
|
||||||
|
In errors like `value is an integer while a list was expected`, the message now
|
||||||
|
includes the failing value.
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: value is a set while a string was expected
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: expected a string but found a set: { ghc810 = «thunk»;
|
||||||
|
ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»;
|
||||||
|
ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»;
|
||||||
|
ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»}
|
||||||
|
```
|
||||||
|
|
||||||
|
- Source locations are printed more consistently in errors [#561](https://github.com/NixOS/nix/issues/561) [#9555](https://github.com/NixOS/nix/pull/9555)
|
||||||
|
|
||||||
|
Source location information is now included in error messages more
|
||||||
|
consistently. Given this code:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
let
|
||||||
|
attr = {foo = "bar";};
|
||||||
|
key = {};
|
||||||
|
in
|
||||||
|
attr.${key}
|
||||||
|
```
|
||||||
|
|
||||||
|
Previously, Nix would show this unhelpful message when attempting to evaluate
|
||||||
|
it:
|
||||||
|
|
||||||
|
```
|
||||||
|
error:
|
||||||
|
… while evaluating an attribute name
|
||||||
|
|
||||||
|
error: value is a set while a string was expected
|
||||||
|
```
|
||||||
|
|
||||||
|
Now, the error message displays where the problematic value was found:
|
||||||
|
|
||||||
|
```
|
||||||
|
error:
|
||||||
|
… while evaluating an attribute name
|
||||||
|
|
||||||
|
at bad.nix:4:11:
|
||||||
|
|
||||||
|
3| key = {};
|
||||||
|
4| in attr.${key}
|
||||||
|
| ^
|
||||||
|
5|
|
||||||
|
|
||||||
|
error: expected a string but found a set
|
||||||
|
```
|
||||||
|
|
||||||
|
- Some stack overflow segfaults are fixed [#9616](https://github.com/NixOS/nix/issues/9616) [#9617](https://github.com/NixOS/nix/pull/9617)
|
||||||
|
|
||||||
|
The number of nested function calls has been restricted in order to detect and report
|
||||||
|
infinite function call recursions. The default maximum call depth is 10,000 and
|
||||||
|
can be set with [the `max-call-depth`
|
||||||
|
option](@docroot@/command-ref/conf-file.md#conf-max-call-depth).
|
||||||
|
|
||||||
|
This replaces the `stack overflow (possible infinite recursion)` message.
|
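For illustration (a minimal sketch; `./deep-recursion.nix` is a placeholder file):

```console
# This expression recurses forever; evaluation now stops with an error
# once the default limit of 10,000 nested calls is exceeded.
$ nix eval --expr 'let f = x: f x; in f 1'

# Legitimately deep (but finite) recursion can opt into a higher limit.
$ nix eval --option max-call-depth 100000 --file ./deep-recursion.nix
```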
||||||
|
|
||||||
|
- Better error reporting for `with` expressions [#9658](https://github.com/NixOS/nix/pull/9658)
|
||||||
|
|
||||||
|
Errors from `with` expressions that use a non-attrset value to resolve variables are now reported with proper positions, e.g.
|
||||||
|
|
||||||
|
```
|
||||||
|
nix-repl> with 1; a
|
||||||
|
error:
|
||||||
|
… while evaluating the first subexpression of a with expression
|
||||||
|
at «string»:1:1:
|
||||||
|
1| with 1; a
|
||||||
|
| ^
|
||||||
|
|
||||||
|
error: expected a set but found an integer
|
||||||
|
```
|
|
@ -190,7 +190,6 @@
|
||||||
boehmgc = final.boehmgc-nix;
|
boehmgc = final.boehmgc-nix;
|
||||||
libgit2 = final.libgit2-nix;
|
libgit2 = final.libgit2-nix;
|
||||||
busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
|
busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
|
||||||
changelog-d = final.changelog-d-nix;
|
|
||||||
} // {
|
} // {
|
||||||
# this is a proper separate downstream package, but put
|
# this is a proper separate downstream package, but put
|
||||||
# here also for back compat reasons.
|
# here also for back compat reasons.
|
||||||
|
@ -363,7 +362,7 @@
|
||||||
});
|
});
|
||||||
|
|
||||||
packages = forAllSystems (system: rec {
|
packages = forAllSystems (system: rec {
|
||||||
inherit (nixpkgsFor.${system}.native) nix;
|
inherit (nixpkgsFor.${system}.native) nix changelog-d-nix;
|
||||||
default = nix;
|
default = nix;
|
||||||
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems) {
|
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems) {
|
||||||
nix-static = nixpkgsFor.${system}.static.nix;
|
nix-static = nixpkgsFor.${system}.static.nix;
|
||||||
|
|
|
@ -43,7 +43,11 @@ The team meets twice a week:
|
||||||
- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
||||||
|
|
||||||
1. Triage issues and pull requests from the [No Status](#no-status) column (30 min)
|
1. Triage issues and pull requests from the [No Status](#no-status) column (30 min)
|
||||||
2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min)
|
2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min).
|
||||||
|
Once a month, each team member checks the [Assigned](#assigned) column for prs/issues assigned to them, to either
|
||||||
|
- unblock it by providing input
|
||||||
|
- mark it as draft if it is blocked on the contributor
|
||||||
|
- escalate it back to the team by moving it to To discuss, and leaving a comment as to why the issue needs to be discussed again.
|
||||||
|
|
||||||
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
#!/usr/bin/env nix-shell
|
#!/usr/bin/env nix
|
||||||
#!nix-shell -i bash ../shell.nix -I nixpkgs=channel:nixos-unstable-small
|
#!nix shell .#changelog-d-nix --command bash
|
||||||
# ^^^^^^^
|
|
||||||
# Only used for bash. shell.nix goes to the flake.
|
|
||||||
|
|
||||||
# --- CONFIGURATION ---
|
# --- CONFIGURATION ---
|
||||||
|
|
||||||
|
|
|
@ -27,8 +27,9 @@ release:
|
||||||
* Compile the release notes by running
|
* Compile the release notes by running
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
$ export VERSION=X.YY
|
||||||
$ git checkout -b release-notes
|
$ git checkout -b release-notes
|
||||||
$ VERSION=X.YY ./maintainers/release-notes
|
$ ./maintainers/release-notes
|
||||||
```
|
```
|
||||||
|
|
||||||
where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~.
|
where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~.
|
||||||
|
|
|
@ -97,6 +97,10 @@ $(foreach test-group, $(install-tests-groups), \
|
||||||
$(eval $(call run-test,$(test),$(install_test_init))) \
|
$(eval $(call run-test,$(test),$(install_test_init))) \
|
||||||
$(eval $(test-group).test-group: $(test).test)))
|
$(eval $(test-group).test-group: $(test).test)))
|
||||||
|
|
||||||
|
# Include makefiles requiring built programs.
|
||||||
|
$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf))))
|
||||||
|
|
||||||
|
|
||||||
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
||||||
|
|
||||||
|
|
||||||
|
|
12
package.nix
|
@ -10,7 +10,6 @@
|
||||||
, boost
|
, boost
|
||||||
, brotli
|
, brotli
|
||||||
, bzip2
|
, bzip2
|
||||||
, changelog-d
|
|
||||||
, curl
|
, curl
|
||||||
, editline
|
, editline
|
||||||
, readline
|
, readline
|
||||||
|
@ -88,11 +87,6 @@
|
||||||
# - readline
|
# - readline
|
||||||
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
|
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
|
||||||
|
|
||||||
# Whether to compile `rl-next.md`, the release notes for the next
|
|
||||||
# not-yet-released version of Nix in the manul, from the individual
|
|
||||||
# change log entries in the directory.
|
|
||||||
, buildUnreleasedNotes ? false
|
|
||||||
|
|
||||||
# Whether to build the internal API docs, can be done separately from
|
# Whether to build the internal API docs, can be done separately from
|
||||||
# everything else.
|
# everything else.
|
||||||
, enableInternalAPIDocs ? false
|
, enableInternalAPIDocs ? false
|
||||||
|
@ -218,9 +212,6 @@ in {
|
||||||
] ++ lib.optionals (doInstallCheck || enableManual) [
|
] ++ lib.optionals (doInstallCheck || enableManual) [
|
||||||
jq # Also for custom mdBook preprocessor.
|
jq # Also for custom mdBook preprocessor.
|
||||||
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
|
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
|
||||||
# Official releases don't have rl-next, so we don't need to compile a
|
|
||||||
# changelog
|
|
||||||
++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d
|
|
||||||
++ lib.optional enableInternalAPIDocs doxygen
|
++ lib.optional enableInternalAPIDocs doxygen
|
||||||
;
|
;
|
||||||
|
|
||||||
|
@ -378,9 +369,6 @@ in {
|
||||||
# Nix proper (which they depend on).
|
# Nix proper (which they depend on).
|
||||||
(installUnitTests -> doBuild)
|
(installUnitTests -> doBuild)
|
||||||
(doCheck -> doBuild)
|
(doCheck -> doBuild)
|
||||||
# We have to build the manual to build unreleased notes, as those
|
|
||||||
# are part of the manual
|
|
||||||
(buildUnreleasedNotes -> enableManual)
|
|
||||||
# The build process for the manual currently requires extracting
|
# The build process for the manual currently requires extracting
|
||||||
# data from the Nix executable we are trying to document.
|
# data from the Nix executable we are trying to document.
|
||||||
(enableManual -> doBuild)
|
(enableManual -> doBuild)
|
||||||
|
|
2
perl/.yath.rc
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
[test]
|
||||||
|
-I=rel(lib/Nix)
|
|
@ -5,12 +5,12 @@
|
||||||
, nix, curl, bzip2, xz, boost, libsodium, darwin
|
, nix, curl, bzip2, xz, boost, libsodium, darwin
|
||||||
}:
|
}:
|
||||||
|
|
||||||
perl.pkgs.toPerlModule (stdenv.mkDerivation {
|
perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: {
|
||||||
name = "nix-perl-${nix.version}";
|
name = "nix-perl-${nix.version}";
|
||||||
|
|
||||||
src = fileset.toSource {
|
src = fileset.toSource {
|
||||||
root = ../.;
|
root = ../.;
|
||||||
fileset = fileset.unions [
|
fileset = fileset.unions ([
|
||||||
../.version
|
../.version
|
||||||
../m4
|
../m4
|
||||||
../mk
|
../mk
|
||||||
|
@ -20,7 +20,10 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
|
||||||
./configure.ac
|
./configure.ac
|
||||||
./lib
|
./lib
|
||||||
./local.mk
|
./local.mk
|
||||||
];
|
] ++ lib.optionals finalAttrs.doCheck [
|
||||||
|
./.yath.rc
|
||||||
|
./t
|
||||||
|
]);
|
||||||
};
|
};
|
||||||
|
|
||||||
nativeBuildInputs =
|
nativeBuildInputs =
|
||||||
|
@ -40,6 +43,13 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
|
||||||
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
|
||||||
++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;
|
||||||
|
|
||||||
|
# `perlPackages.Test2Harness` is marked broken for Darwin
|
||||||
|
doCheck = !stdenv.isDarwin;
|
||||||
|
|
||||||
|
nativeCheckInputs = [
|
||||||
|
perlPackages.Test2Harness
|
||||||
|
];
|
||||||
|
|
||||||
configureFlags = [
|
configureFlags = [
|
||||||
"--with-dbi=${perlPackages.DBI}/${perl.libPrefix}"
|
"--with-dbi=${perlPackages.DBI}/${perl.libPrefix}"
|
||||||
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}"
|
"--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}"
|
||||||
|
@ -48,4 +58,4 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
|
||||||
enableParallelBuilding = true;
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
postUnpack = "sourceRoot=$sourceRoot/perl";
|
postUnpack = "sourceRoot=$sourceRoot/perl";
|
||||||
})
|
}))
|
||||||
|
|
|
@ -12,17 +12,20 @@ our %EXPORT_TAGS = ( 'all' => [ qw( ) ] );
|
||||||
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
|
our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );
|
||||||
|
|
||||||
our @EXPORT = qw(
|
our @EXPORT = qw(
|
||||||
setVerbosity
|
StoreWrapper
|
||||||
isValidPath queryReferences queryPathInfo queryDeriver queryPathHash
|
StoreWrapper::new
|
||||||
queryPathFromHashPart
|
StoreWrapper::isValidPath StoreWrapper::queryReferences StoreWrapper::queryPathInfo StoreWrapper::queryDeriver StoreWrapper::queryPathHash
|
||||||
topoSortPaths computeFSClosure followLinksToStorePath exportPaths importPaths
|
StoreWrapper::queryPathFromHashPart
|
||||||
|
StoreWrapper::topoSortPaths StoreWrapper::computeFSClosure followLinksToStorePath StoreWrapper::exportPaths StoreWrapper::importPaths
|
||||||
|
StoreWrapper::addToStore StoreWrapper::makeFixedOutputPath
|
||||||
|
StoreWrapper::derivationFromPath
|
||||||
|
StoreWrapper::addTempRoot
|
||||||
|
StoreWrapper::queryRawRealisation
|
||||||
|
|
||||||
hashPath hashFile hashString convertHash
|
hashPath hashFile hashString convertHash
|
||||||
signString checkSignature
|
signString checkSignature
|
||||||
addToStore makeFixedOutputPath
|
|
||||||
derivationFromPath
|
|
||||||
addTempRoot
|
|
||||||
getBinDir getStoreDir
|
getBinDir getStoreDir
|
||||||
queryRawRealisation
|
setVerbosity
|
||||||
);
|
);
|
||||||
|
|
||||||
our $VERSION = '0.15';
|
our $VERSION = '0.15';
|
||||||
|
|
|
@ -17,47 +17,61 @@
|
||||||
#include <sodium.h>
|
#include <sodium.h>
|
||||||
#include <nlohmann/json.hpp>
|
#include <nlohmann/json.hpp>
|
||||||
|
|
||||||
|
|
||||||
using namespace nix;
|
using namespace nix;
|
||||||
|
|
||||||
|
static bool libStoreInitialized = false;
|
||||||
|
|
||||||
static ref<Store> store()
|
struct StoreWrapper {
|
||||||
{
|
ref<Store> store;
|
||||||
static std::shared_ptr<Store> _store;
|
};
|
||||||
if (!_store) {
|
|
||||||
try {
|
|
||||||
initLibStore();
|
|
||||||
_store = openStore();
|
|
||||||
} catch (Error & e) {
|
|
||||||
croak("%s", e.what());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ref<Store>(_store);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
MODULE = Nix::Store PACKAGE = Nix::Store
|
MODULE = Nix::Store PACKAGE = Nix::Store
|
||||||
PROTOTYPES: ENABLE
|
PROTOTYPES: ENABLE
|
||||||
|
|
||||||
|
TYPEMAP: <<HERE
|
||||||
|
StoreWrapper * O_OBJECT
|
||||||
|
|
||||||
|
OUTPUT
|
||||||
|
O_OBJECT
|
||||||
|
sv_setref_pv( $arg, CLASS, (void*)$var );
|
||||||
|
|
||||||
|
INPUT
|
||||||
|
O_OBJECT
|
||||||
|
if ( sv_isobject($arg) && (SvTYPE(SvRV($arg)) == SVt_PVMG) ) {
|
||||||
|
$var = ($type)SvIV((SV*)SvRV( $arg ));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
warn( \"${Package}::$func_name() -- \"
|
||||||
|
\"$var not a blessed SV reference\");
|
||||||
|
XSRETURN_UNDEF;
|
||||||
|
}
|
||||||
|
HERE
|
||||||
|
|
||||||
#undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang.
|
#undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang.
|
||||||
#define dNOOP
|
#define dNOOP
|
||||||
|
|
||||||
|
void
|
||||||
|
StoreWrapper::DESTROY()
|
||||||
|
|
||||||
void init()
|
StoreWrapper *
|
||||||
CODE:
|
StoreWrapper::new(char * s = nullptr)
|
||||||
store();
|
|
||||||
|
|
||||||
|
|
||||||
void setVerbosity(int level)
|
|
||||||
CODE:
|
|
||||||
verbosity = (Verbosity) level;
|
|
||||||
|
|
||||||
|
|
||||||
int isValidPath(char * path)
|
|
||||||
CODE:
|
CODE:
|
||||||
|
static std::shared_ptr<Store> _store;
|
||||||
try {
|
try {
|
||||||
RETVAL = store()->isValidPath(store()->parseStorePath(path));
|
if (!libStoreInitialized) {
|
||||||
|
initLibStore();
|
||||||
|
libStoreInitialized = true;
|
||||||
|
}
|
||||||
|
if (items == 1) {
|
||||||
|
_store = openStore();
|
||||||
|
RETVAL = new StoreWrapper {
|
||||||
|
.store = ref<Store>{_store}
|
||||||
|
};
|
||||||
|
} else {
|
||||||
|
RETVAL = new StoreWrapper {
|
||||||
|
.store = openStore(s)
|
||||||
|
};
|
||||||
|
}
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
@ -65,52 +79,81 @@ int isValidPath(char * path)
|
||||||
RETVAL
|
RETVAL
|
||||||
|
|
||||||
|
|
||||||
SV * queryReferences(char * path)
|
void init()
|
||||||
|
CODE:
|
||||||
|
if (!libStoreInitialized) {
|
||||||
|
initLibStore();
|
||||||
|
libStoreInitialized = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
void setVerbosity(int level)
|
||||||
|
CODE:
|
||||||
|
verbosity = (Verbosity) level;
|
||||||
|
|
||||||
|
|
||||||
|
int
|
||||||
|
StoreWrapper::isValidPath(char * path)
|
||||||
|
CODE:
|
||||||
|
try {
|
||||||
|
RETVAL = THIS->store->isValidPath(THIS->store->parseStorePath(path));
|
||||||
|
} catch (Error & e) {
|
||||||
|
croak("%s", e.what());
|
||||||
|
}
|
||||||
|
OUTPUT:
|
||||||
|
RETVAL
|
||||||
|
|
||||||
|
|
||||||
|
SV *
|
||||||
|
StoreWrapper::queryReferences(char * path)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references)
|
for (auto & i : THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->references)
|
||||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * queryPathHash(char * path)
|
SV *
|
||||||
|
StoreWrapper::queryPathHash(char * path)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
|
auto s = THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
|
||||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * queryDeriver(char * path)
|
SV *
|
||||||
|
StoreWrapper::queryDeriver(char * path)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto info = store()->queryPathInfo(store()->parseStorePath(path));
|
auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
|
||||||
if (!info->deriver) XSRETURN_UNDEF;
|
if (!info->deriver) XSRETURN_UNDEF;
|
||||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * queryPathInfo(char * path, int base32)
|
SV *
|
||||||
|
StoreWrapper::queryPathInfo(char * path, int base32)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto info = store()->queryPathInfo(store()->parseStorePath(path));
|
auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
|
||||||
if (!info->deriver)
|
if (!info->deriver)
|
||||||
XPUSHs(&PL_sv_undef);
|
XPUSHs(&PL_sv_undef);
|
||||||
else
|
else
|
||||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
|
||||||
auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
|
auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
|
||||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||||
mXPUSHi(info->registrationTime);
|
mXPUSHi(info->registrationTime);
|
||||||
mXPUSHi(info->narSize);
|
mXPUSHi(info->narSize);
|
||||||
AV * refs = newAV();
|
AV * refs = newAV();
|
||||||
for (auto & i : info->references)
|
for (auto & i : info->references)
|
||||||
av_push(refs, newSVpv(store()->printStorePath(i).c_str(), 0));
|
av_push(refs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
|
||||||
XPUSHs(sv_2mortal(newRV((SV *) refs)));
|
XPUSHs(sv_2mortal(newRV((SV *) refs)));
|
||||||
AV * sigs = newAV();
|
AV * sigs = newAV();
|
||||||
for (auto & i : info->sigs)
|
for (auto & i : info->sigs)
|
||||||
|
@ -120,10 +163,11 @@ SV * queryPathInfo(char * path, int base32)
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
SV * queryRawRealisation(char * outputId)
|
SV *
|
||||||
|
StoreWrapper::queryRawRealisation(char * outputId)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto realisation = store()->queryRealisation(DrvOutput::parse(outputId));
|
auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId));
|
||||||
if (realisation)
|
if (realisation)
|
||||||
XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0)));
|
||||||
else
|
else
|
||||||
|
@ -133,46 +177,50 @@ SV * queryRawRealisation(char * outputId)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * queryPathFromHashPart(char * hashPart)
|
SV *
|
||||||
|
StoreWrapper::queryPathFromHashPart(char * hashPart)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto path = store()->queryPathFromHashPart(hashPart);
|
auto path = THIS->store->queryPathFromHashPart(hashPart);
|
||||||
XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0)));
|
XPUSHs(sv_2mortal(newSVpv(path ? THIS->store->printStorePath(*path).c_str() : "", 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * computeFSClosure(int flipDirection, int includeOutputs, ...)
|
SV *
|
||||||
|
StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
StorePathSet paths;
|
StorePathSet paths;
|
||||||
for (int n = 2; n < items; ++n)
|
for (int n = 2; n < items; ++n)
|
||||||
store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs);
|
THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs);
|
||||||
for (auto & i : paths)
|
for (auto & i : paths)
|
||||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * topoSortPaths(...)
|
SV *
|
||||||
|
StoreWrapper::topoSortPaths(...)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
StorePathSet paths;
|
StorePathSet paths;
|
||||||
for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n))));
|
for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
|
||||||
auto sorted = store()->topoSortPaths(paths);
|
auto sorted = THIS->store->topoSortPaths(paths);
|
||||||
for (auto & i : sorted)
|
for (auto & i : sorted)
|
||||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * followLinksToStorePath(char * path)
|
SV *
|
||||||
|
StoreWrapper::followLinksToStorePath(char * path)
|
||||||
CODE:
|
CODE:
|
||||||
try {
|
try {
|
||||||
RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0);
|
RETVAL = newSVpv(THIS->store->printStorePath(THIS->store->followLinksToStorePath(path)).c_str(), 0);
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
@ -180,34 +228,37 @@ SV * followLinksToStorePath(char * path)
|
||||||
RETVAL
|
RETVAL
|
||||||
|
|
||||||
|
|
||||||
void exportPaths(int fd, ...)
|
void
|
||||||
|
StoreWrapper::exportPaths(int fd, ...)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
StorePathSet paths;
|
StorePathSet paths;
|
||||||
for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n))));
|
for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
|
||||||
FdSink sink(fd);
|
FdSink sink(fd);
|
||||||
store()->exportPaths(paths, sink);
|
THIS->store->exportPaths(paths, sink);
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
void importPaths(int fd, int dontCheckSigs)
|
void
|
||||||
|
StoreWrapper::importPaths(int fd, int dontCheckSigs)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
FdSource source(fd);
|
FdSource source(fd);
|
||||||
store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
|
THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * hashPath(char * algo, int base32, char * path)
|
SV *
|
||||||
|
hashPath(char * algo, int base32, char * path)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
PosixSourceAccessor accessor;
|
auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
|
||||||
Hash h = hashPath(
|
Hash h = hashPath(
|
||||||
accessor, CanonPath::fromCwd(path),
|
accessor, canonPath,
|
||||||
FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
|
FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
|
||||||
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
|
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
|
||||||
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
|
||||||
|
@ -280,64 +331,67 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)
|
||||||
RETVAL
|
RETVAL
|
||||||
|
|
||||||
|
|
||||||
SV * addToStore(char * srcPath, int recursive, char * algo)
|
SV *
|
||||||
|
StoreWrapper::addToStore(char * srcPath, int recursive, char * algo)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
||||||
PosixSourceAccessor accessor;
|
auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath);
|
||||||
auto path = store()->addToStore(
|
auto path = THIS->store->addToStore(
|
||||||
std::string(baseNameOf(srcPath)),
|
std::string(baseNameOf(srcPath)),
|
||||||
accessor, CanonPath::fromCwd(srcPath),
|
accessor, canonPath,
|
||||||
method, parseHashAlgo(algo));
|
method, parseHashAlgo(algo));
|
||||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
|
SV *
|
||||||
|
StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
auto h = Hash::parseAny(hash, parseHashAlgo(algo));
|
auto h = Hash::parseAny(hash, parseHashAlgo(algo));
|
||||||
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
|
||||||
auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
|
auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo {
|
||||||
.method = method,
|
.method = method,
|
||||||
.hash = h,
|
.hash = h,
|
||||||
.references = {},
|
.references = {},
|
||||||
});
|
});
|
||||||
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
|
XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SV * derivationFromPath(char * drvPath)
|
SV *
|
||||||
|
StoreWrapper::derivationFromPath(char * drvPath)
|
||||||
PREINIT:
|
PREINIT:
|
||||||
HV *hash;
|
HV *hash;
|
||||||
CODE:
|
CODE:
|
||||||
try {
|
try {
|
||||||
Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath));
|
Derivation drv = THIS->store->derivationFromPath(THIS->store->parseStorePath(drvPath));
|
||||||
hash = newHV();
|
hash = newHV();
|
||||||
|
|
||||||
HV * outputs = newHV();
|
HV * outputs = newHV();
|
||||||
for (auto & i : drv.outputsAndOptPaths(*store())) {
|
for (auto & i : drv.outputsAndOptPaths(*THIS->store)) {
|
||||||
hv_store(
|
hv_store(
|
||||||
outputs, i.first.c_str(), i.first.size(),
|
outputs, i.first.c_str(), i.first.size(),
|
||||||
!i.second.second
|
!i.second.second
|
||||||
? newSV(0) /* null value */
|
? newSV(0) /* null value */
|
||||||
: newSVpv(store()->printStorePath(*i.second.second).c_str(), 0),
|
: newSVpv(THIS->store->printStorePath(*i.second.second).c_str(), 0),
|
||||||
0);
|
0);
|
||||||
}
|
}
|
||||||
hv_stores(hash, "outputs", newRV((SV *) outputs));
|
hv_stores(hash, "outputs", newRV((SV *) outputs));
|
||||||
|
|
||||||
AV * inputDrvs = newAV();
|
AV * inputDrvs = newAV();
|
||||||
for (auto & i : drv.inputDrvs.map)
|
for (auto & i : drv.inputDrvs.map)
|
||||||
av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
|
av_push(inputDrvs, newSVpv(THIS->store->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
|
||||||
hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs));
|
hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs));
|
||||||
|
|
||||||
AV * inputSrcs = newAV();
|
AV * inputSrcs = newAV();
|
||||||
for (auto & i : drv.inputSrcs)
|
for (auto & i : drv.inputSrcs)
|
||||||
av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0));
|
av_push(inputSrcs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
|
||||||
hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs));
|
hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs));
|
||||||
|
|
||||||
hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0));
|
hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0));
|
||||||
|
@ -361,10 +415,11 @@ SV * derivationFromPath(char * drvPath)
|
||||||
RETVAL
|
RETVAL
|
||||||
|
|
||||||
|
|
||||||
void addTempRoot(char * storePath)
|
void
|
||||||
|
StoreWrapper::addTempRoot(char * storePath)
|
||||||
PPCODE:
|
PPCODE:
|
||||||
try {
|
try {
|
||||||
store()->addTempRoot(store()->parseStorePath(storePath));
|
THIS->store->addTempRoot(THIS->store->parseStorePath(storePath));
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
croak("%s", e.what());
|
croak("%s", e.what());
|
||||||
}
|
}
|
||||||
|
|
|
@ -41,3 +41,6 @@ Store_FORCE_INSTALL = 1
|
||||||
Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store
|
Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store
|
||||||
|
|
||||||
clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config
|
clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config
|
||||||
|
|
||||||
|
check: all
|
||||||
|
yath test
|
||||||
|
|
13
perl/t/init.t
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
use strict;
|
||||||
|
use warnings;
|
||||||
|
use Test2::V0;
|
||||||
|
|
||||||
|
use Nix::Store;
|
||||||
|
|
||||||
|
my $s = new Nix::Store("dummy://");
|
||||||
|
|
||||||
|
my $res = $s->isValidPath("/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar");
|
||||||
|
|
||||||
|
ok(!$res, "should not have path");
|
||||||
|
|
||||||
|
done_testing;
|
|
@ -137,11 +137,8 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
for (auto & m : machines) {
|
for (auto & m : machines) {
|
||||||
debug("considering building on remote machine '%s'", m.storeUri);
|
debug("considering building on remote machine '%s'", m.storeUri);
|
||||||
|
|
||||||
if (m.enabled
|
if (m.enabled &&
|
||||||
&& (neededSystem == "builtin"
|
m.systemSupported(neededSystem) &&
|
||||||
|| std::find(m.systemTypes.begin(),
|
|
||||||
m.systemTypes.end(),
|
|
||||||
neededSystem) != m.systemTypes.end()) &&
|
|
||||||
m.allSupported(requiredFeatures) &&
|
m.allSupported(requiredFeatures) &&
|
||||||
m.mandatoryMet(requiredFeatures))
|
m.mandatoryMet(requiredFeatures))
|
||||||
{
|
{
|
||||||
|
@ -205,7 +202,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
else
|
else
|
||||||
drvstr = "<unknown>";
|
drvstr = "<unknown>";
|
||||||
|
|
||||||
auto error = hintformat(errorText);
|
auto error = HintFmt(errorText);
|
||||||
error
|
error
|
||||||
% drvstr
|
% drvstr
|
||||||
% neededSystem
|
% neededSystem
|
||||||
|
@ -214,7 +211,7 @@ static int main_build_remote(int argc, char * * argv)
|
||||||
|
|
||||||
for (auto & m : machines)
|
for (auto & m : machines)
|
||||||
error
|
error
|
||||||
% concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
|
% concatStringsSep<StringSet>(", ", m.systemTypes)
|
||||||
% m.maxJobs
|
% m.maxJobs
|
||||||
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
|
% concatStringsSep<StringSet>(", ", m.supportedFeatures)
|
||||||
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
|
% concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
|
||||||
|
|
|
@ -156,7 +156,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||||
for (auto & i : autoArgs) {
|
for (auto & i : autoArgs) {
|
||||||
auto v = state.allocValue();
|
auto v = state.allocValue();
|
||||||
if (i.second[0] == 'E')
|
if (i.second[0] == 'E')
|
||||||
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd())));
|
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
|
||||||
else
|
else
|
||||||
v->mkString(((std::string_view) i.second).substr(1));
|
v->mkString(((std::string_view) i.second).substr(1));
|
||||||
res.insert(state.symbols.create(i.first), v);
|
res.insert(state.symbols.create(i.first), v);
|
||||||
|
@ -164,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||||
return res.finish();
|
return res.finish();
|
||||||
}
|
}
|
||||||
|
|
||||||
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
|
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
|
||||||
{
|
{
|
||||||
if (EvalSettings::isPseudoUrl(s)) {
|
if (EvalSettings::isPseudoUrl(s)) {
|
||||||
auto storePath = fetchers::downloadTarball(
|
auto storePath = fetchers::downloadTarball(
|
||||||
|
@ -185,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDi
|
||||||
}
|
}
|
||||||
|
|
||||||
else
|
else
|
||||||
return state.rootPath(CanonPath(s, baseDir));
|
return state.rootPath(baseDir ? absPath(s, *baseDir) : absPath(s));
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -29,6 +29,6 @@ private:
|
||||||
std::map<std::string, std::string> autoArgs;
|
std::map<std::string, std::string> autoArgs;
|
||||||
};
|
};
|
||||||
|
|
||||||
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
|
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line)
|
||||||
editor.find("vim") != std::string::npos ||
|
editor.find("vim") != std::string::npos ||
|
||||||
editor.find("kak") != std::string::npos))
|
editor.find("kak") != std::string::npos))
|
||||||
args.push_back(fmt("+%d", line));
|
args.push_back(fmt("+%d", line));
|
||||||
args.push_back(path->abs());
|
args.push_back(path->string());
|
||||||
return args;
|
return args;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -487,10 +487,11 @@ Installables SourceExprCommand::parseInstallables(
|
||||||
state->eval(e, *vFile);
|
state->eval(e, *vFile);
|
||||||
}
|
}
|
||||||
else if (file) {
|
else if (file) {
|
||||||
state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
|
auto dir = absPath(getCommandBaseDir());
|
||||||
|
state->evalFile(lookupFileArg(*state, *file, &dir), *vFile);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
|
Path dir = absPath(getCommandBaseDir());
|
||||||
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
|
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
|
||||||
state->eval(e, *vFile);
|
state->eval(e, *vFile);
|
||||||
}
|
}
|
||||||
|
|
|
@ -232,7 +232,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
|
||||||
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
|
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
|
||||||
|
|
||||||
if (pos) {
|
if (pos) {
|
||||||
out << pos;
|
out << *pos;
|
||||||
if (auto loc = pos->getCodeLines()) {
|
if (auto loc = pos->getCodeLines()) {
|
||||||
out << "\n";
|
out << "\n";
|
||||||
printCodeLines(out, "", *pos, *loc);
|
printCodeLines(out, "", *pos, *loc);
|
||||||
|
@ -243,10 +243,19 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
|
||||||
return out;
|
return out;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static bool isFirstRepl = true;
|
||||||
|
|
||||||
void NixRepl::mainLoop()
|
void NixRepl::mainLoop()
|
||||||
{
|
{
|
||||||
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
|
if (isFirstRepl) {
|
||||||
notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
|
std::string_view debuggerNotice = "";
|
||||||
|
if (state->debugRepl) {
|
||||||
|
debuggerNotice = " debugger";
|
||||||
|
}
|
||||||
|
notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice);
|
||||||
|
}
|
||||||
|
|
||||||
|
isFirstRepl = false;
|
||||||
|
|
||||||
loadFiles();
|
loadFiles();
|
||||||
|
|
||||||
|
@ -422,8 +431,6 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
|
||||||
// Quietly ignore parse errors.
|
// Quietly ignore parse errors.
|
||||||
} catch (EvalError & e) {
|
} catch (EvalError & e) {
|
||||||
// Quietly ignore evaluation errors.
|
// Quietly ignore evaluation errors.
|
||||||
} catch (UndefinedVarError & e) {
|
|
||||||
// Quietly ignore undefined variable errors.
|
|
||||||
} catch (BadURL & e) {
|
} catch (BadURL & e) {
|
||||||
// Quietly ignore BadURL flake-related errors.
|
// Quietly ignore BadURL flake-related errors.
|
||||||
}
|
}
|
||||||
|
@ -890,7 +897,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)
|
||||||
|
|
||||||
Expr * NixRepl::parseString(std::string s)
|
Expr * NixRepl::parseString(std::string s)
|
||||||
{
|
{
|
||||||
return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv);
|
return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -65,10 +65,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
|
||||||
if (!attrIndex) {
|
if (!attrIndex) {
|
||||||
|
|
||||||
if (v->type() != nAttrs)
|
if (v->type() != nAttrs)
|
||||||
throw TypeError(
|
state.error<TypeError>(
|
||||||
"the expression selected by the selection path '%1%' should be a set but is %2%",
|
"the expression selected by the selection path '%1%' should be a set but is %2%",
|
||||||
attrPath,
|
attrPath,
|
||||||
showType(*v));
|
showType(*v)).debugThrow();
|
||||||
if (attr.empty())
|
if (attr.empty())
|
||||||
throw Error("empty attribute name in selection path '%1%'", attrPath);
|
throw Error("empty attribute name in selection path '%1%'", attrPath);
|
||||||
|
|
||||||
|
@ -88,10 +88,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
|
||||||
else {
|
else {
|
||||||
|
|
||||||
if (!v->isList())
|
if (!v->isList())
|
||||||
throw TypeError(
|
state.error<TypeError>(
|
||||||
"the expression selected by the selection path '%1%' should be a list but is %2%",
|
"the expression selected by the selection path '%1%' should be a list but is %2%",
|
||||||
attrPath,
|
attrPath,
|
||||||
showType(*v));
|
showType(*v)).debugThrow();
|
||||||
if (*attrIndex >= v->listSize())
|
if (*attrIndex >= v->listSize())
|
||||||
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);
|
throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);
|
||||||
|
|
||||||
|
|
|
@ -491,7 +491,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
||||||
if (forceErrors)
|
if (forceErrors)
|
||||||
debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
|
debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
|
||||||
else
|
else
|
||||||
throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
|
throw CachedEvalError(root->state, "cached failure of attribute '%s'", getAttrPathStr(name));
|
||||||
} else
|
} else
|
||||||
return std::make_shared<AttrCursor>(root,
|
return std::make_shared<AttrCursor>(root,
|
||||||
std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
|
std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
|
||||||
|
@ -500,7 +500,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
||||||
// evaluate to see whether 'name' exists
|
// evaluate to see whether 'name' exists
|
||||||
} else
|
} else
|
||||||
return nullptr;
|
return nullptr;
|
||||||
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
|
//error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -508,7 +508,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
||||||
|
|
||||||
if (v.type() != nAttrs)
|
if (v.type() != nAttrs)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
|
//error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||||
|
|
||||||
auto attr = v.attrs->get(name);
|
auto attr = v.attrs->get(name);
|
||||||
|
|
||||||
|
@ -574,14 +574,14 @@ std::string AttrCursor::getString()
|
||||||
debug("using cached string attribute '%s'", getAttrPathStr());
|
debug("using cached string attribute '%s'", getAttrPathStr());
|
||||||
return s->first;
|
return s->first;
|
||||||
} else
|
} else
|
||||||
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
auto & v = forceValue();
|
auto & v = forceValue();
|
||||||
|
|
||||||
if (v.type() != nString && v.type() != nPath)
|
if (v.type() != nString && v.type() != nPath)
|
||||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
|
||||||
|
|
||||||
return v.type() == nString ? v.c_str() : v.path().to_string();
|
return v.type() == nString ? v.c_str() : v.path().to_string();
|
||||||
}
|
}
|
||||||
|
@ -616,7 +616,7 @@ string_t AttrCursor::getStringWithContext()
|
||||||
return *s;
|
return *s;
|
||||||
}
|
}
|
||||||
} else
|
} else
|
||||||
root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext()
|
||||||
else if (v.type() == nPath)
|
else if (v.type() == nPath)
|
||||||
return {v.path().to_string(), {}};
|
return {v.path().to_string(), {}};
|
||||||
else
|
else
|
||||||
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
|
|
||||||
bool AttrCursor::getBool()
|
bool AttrCursor::getBool()
|
||||||
|
@ -643,14 +643,14 @@ bool AttrCursor::getBool()
|
||||||
debug("using cached Boolean attribute '%s'", getAttrPathStr());
|
debug("using cached Boolean attribute '%s'", getAttrPathStr());
|
||||||
return *b;
|
return *b;
|
||||||
} else
|
} else
|
||||||
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
auto & v = forceValue();
|
auto & v = forceValue();
|
||||||
|
|
||||||
if (v.type() != nBool)
|
if (v.type() != nBool)
|
||||||
root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();
|
||||||
|
|
||||||
return v.boolean;
|
return v.boolean;
|
||||||
}
|
}
|
||||||
|
@ -665,14 +665,14 @@ NixInt AttrCursor::getInt()
|
||||||
debug("using cached integer attribute '%s'", getAttrPathStr());
|
debug("using cached integer attribute '%s'", getAttrPathStr());
|
||||||
return i->x;
|
return i->x;
|
||||||
} else
|
} else
|
||||||
throw TypeError("'%s' is not an integer", getAttrPathStr());
|
root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
auto & v = forceValue();
|
auto & v = forceValue();
|
||||||
|
|
||||||
if (v.type() != nInt)
|
if (v.type() != nInt)
|
||||||
throw TypeError("'%s' is not an integer", getAttrPathStr());
|
root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();
|
||||||
|
|
||||||
return v.integer;
|
return v.integer;
|
||||||
}
|
}
|
||||||
|
@ -687,7 +687,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
|
||||||
debug("using cached list of strings attribute '%s'", getAttrPathStr());
|
debug("using cached list of strings attribute '%s'", getAttrPathStr());
|
||||||
return *l;
|
return *l;
|
||||||
} else
|
} else
|
||||||
throw TypeError("'%s' is not a list of strings", getAttrPathStr());
|
root->state.error<TypeError>("'%s' is not a list of strings", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -697,7 +697,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
|
||||||
root->state.forceValue(v, noPos);
|
root->state.forceValue(v, noPos);
|
||||||
|
|
||||||
if (v.type() != nList)
|
if (v.type() != nList)
|
||||||
throw TypeError("'%s' is not a list", getAttrPathStr());
|
root->state.error<TypeError>("'%s' is not a list", getAttrPathStr()).debugThrow();
|
||||||
|
|
||||||
std::vector<std::string> res;
|
std::vector<std::string> res;
|
||||||
|
|
||||||
|
@ -720,14 +720,14 @@ std::vector<Symbol> AttrCursor::getAttrs()
|
||||||
debug("using cached attrset attribute '%s'", getAttrPathStr());
|
debug("using cached attrset attribute '%s'", getAttrPathStr());
|
||||||
return *attrs;
|
return *attrs;
|
||||||
} else
|
} else
|
||||||
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
auto & v = forceValue();
|
auto & v = forceValue();
|
||||||
|
|
||||||
if (v.type() != nAttrs)
|
if (v.type() != nAttrs)
|
||||||
root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
|
root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
|
||||||
|
|
||||||
std::vector<Symbol> attrs;
|
std::vector<Symbol> attrs;
|
||||||
for (auto & attr : *getValue().attrs)
|
for (auto & attr : *getValue().attrs)
|
||||||
|
|
113
src/libexpr/eval-error.cc
Normal file
|
@ -0,0 +1,113 @@
|
||||||
|
#include "eval-error.hh"
|
||||||
|
#include "eval.hh"
|
||||||
|
#include "value.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withExitStatus(unsigned int exitStatus)
|
||||||
|
{
|
||||||
|
error.withExitStatus(exitStatus);
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(PosIdx pos)
|
||||||
|
{
|
||||||
|
error.err.pos = error.state.positions[pos];
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(Value & value, PosIdx fallback)
|
||||||
|
{
|
||||||
|
return atPos(value.determinePos(fallback));
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withTrace(PosIdx pos, const std::string_view text)
|
||||||
|
{
|
||||||
|
error.err.traces.push_front(
|
||||||
|
Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false});
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrameTrace(PosIdx pos, const std::string_view text)
|
||||||
|
{
|
||||||
|
error.err.traces.push_front(
|
||||||
|
Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = true});
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withSuggestions(Suggestions & s)
|
||||||
|
{
|
||||||
|
error.err.suggestions = s;
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrame(const Env & env, const Expr & expr)
|
||||||
|
{
|
||||||
|
// NOTE: This is abusing side-effects.
|
||||||
|
// TODO: check compatibility with nested debugger calls.
|
||||||
|
// TODO: What side-effects??
|
||||||
|
error.state.debugTraces.push_front(DebugTrace{
|
||||||
|
.pos = error.state.positions[expr.getPos()],
|
||||||
|
.expr = expr,
|
||||||
|
.env = env,
|
||||||
|
.hint = HintFmt("Fake frame for debugging purposes"),
|
||||||
|
.isError = true});
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
EvalErrorBuilder<T> & EvalErrorBuilder<T>::addTrace(PosIdx pos, HintFmt hint, bool frame)
|
||||||
|
{
|
||||||
|
error.addTrace(error.state.positions[pos], hint, frame);
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
template<typename... Args>
|
||||||
|
EvalErrorBuilder<T> &
|
||||||
|
EvalErrorBuilder<T>::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs)
|
||||||
|
{
|
||||||
|
|
||||||
|
addTrace(error.state.positions[pos], HintFmt(std::string(formatString), formatArgs...));
|
||||||
|
return *this;
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class T>
|
||||||
|
void EvalErrorBuilder<T>::debugThrow()
|
||||||
|
{
|
||||||
|
if (error.state.debugRepl && !error.state.debugTraces.empty()) {
|
||||||
|
const DebugTrace & last = error.state.debugTraces.front();
|
||||||
|
const Env * env = &last.env;
|
||||||
|
const Expr * expr = &last.expr;
|
||||||
|
error.state.runDebugRepl(&error, *env, *expr);
|
||||||
|
}
|
||||||
|
|
||||||
|
// `EvalState` is the only class that can construct an `EvalErrorBuilder`,
|
||||||
|
// and it does so in dynamic storage. This is the final method called on
|
||||||
|
// any such instance and must delete itself before throwing the underlying
|
||||||
|
// error.
|
||||||
|
auto error = std::move(this->error);
|
||||||
|
delete this;
|
||||||
|
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
|
||||||
|
template class EvalErrorBuilder<EvalError>;
|
||||||
|
template class EvalErrorBuilder<AssertionError>;
|
||||||
|
template class EvalErrorBuilder<ThrownError>;
|
||||||
|
template class EvalErrorBuilder<Abort>;
|
||||||
|
template class EvalErrorBuilder<TypeError>;
|
||||||
|
template class EvalErrorBuilder<UndefinedVarError>;
|
||||||
|
template class EvalErrorBuilder<MissingArgumentError>;
|
||||||
|
template class EvalErrorBuilder<InfiniteRecursionError>;
|
||||||
|
template class EvalErrorBuilder<CachedEvalError>;
|
||||||
|
template class EvalErrorBuilder<InvalidPathError>;
|
||||||
|
|
||||||
|
}
|
104
src/libexpr/eval-error.hh
Normal file
|
@@ -0,0 +1,104 @@
#pragma once

#include <algorithm>

#include "error.hh"
#include "pos-idx.hh"

namespace nix {

struct Env;
struct Expr;
struct Value;

class EvalState;
template<class T>
class EvalErrorBuilder;

class EvalError : public Error
{
    template<class T>
    friend class EvalErrorBuilder;
public:
    EvalState & state;

    EvalError(EvalState & state, ErrorInfo && errorInfo)
        : Error(errorInfo)
        , state(state)
    {
    }

    template<typename... Args>
    explicit EvalError(EvalState & state, const std::string & formatString, const Args &... formatArgs)
        : Error(formatString, formatArgs...)
        , state(state)
    {
    }
};

MakeError(ParseError, Error);
MakeError(AssertionError, EvalError);
MakeError(ThrownError, AssertionError);
MakeError(Abort, EvalError);
MakeError(TypeError, EvalError);
MakeError(UndefinedVarError, EvalError);
MakeError(MissingArgumentError, EvalError);
MakeError(CachedEvalError, EvalError);
MakeError(InfiniteRecursionError, EvalError);

struct InvalidPathError : public EvalError
{
public:
    Path path;
    InvalidPathError(EvalState & state, const Path & path)
        : EvalError(state, "path '%s' is not valid", path)
    {
    }
};

/**
 * `EvalErrorBuilder`s may only be constructed by `EvalState`. The `debugThrow`
 * method must be the final method in any such `EvalErrorBuilder` usage, and it
 * handles deleting the object.
 */
template<class T>
class EvalErrorBuilder final
{
    friend class EvalState;

    template<typename... Args>
    explicit EvalErrorBuilder(EvalState & state, const Args &... args)
        : error(T(state, args...))
    {
    }

public:
    T error;

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withExitStatus(unsigned int exitStatus);

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(PosIdx pos);

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(Value & value, PosIdx fallback = noPos);

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withTrace(PosIdx pos, const std::string_view text);

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrameTrace(PosIdx pos, const std::string_view text);

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withSuggestions(Suggestions & s);

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrame(const Env & e, const Expr & ex);

    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & addTrace(PosIdx pos, HintFmt hint, bool frame = false);

    template<typename... Args>
    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> &
    addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs);

    /**
     * Delete the `EvalErrorBuilder` and throw the underlying exception.
     */
    [[gnu::noinline, gnu::noreturn]] void debugThrow();
};

}
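A minimal usage sketch of this builder API, pieced together from the call sites changed later in this commit (the message text, trace text, and the `v`/`pos` variables are placeholders, not code from the diff):

    // EvalState::error<T>() heap-allocates the builder; debugThrow() deletes it,
    // so debugThrow() must be the last call in the chain.
    state.error<TypeError>("expected a set but found %1%", showType(v))
        .atPos(pos)                                   // attach a source position
        .withTrace(pos, "while evaluating the example attribute")  // placeholder trace text
        .debugThrow();                                // may enter the debugger REPL, then throws TypeError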
@@ -1,7 +1,9 @@
 #pragma once
 ///@file
 
+#include "print.hh"
 #include "eval.hh"
+#include "eval-error.hh"
 
 namespace nix {
 
@@ -114,7 +116,11 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
     PosIdx pos = getPos();
     forceValue(v, pos);
     if (v.type() != nAttrs) {
-        error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
+        error<TypeError>(
+            "expected a set but found %1%: %2%",
+            showType(v),
+            ValuePrinter(*this, v, errorPrintOptions)
+        ).withTrace(pos, errorCtx).debugThrow();
     }
 }
@@ -124,7 +130,11 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e
 {
     forceValue(v, pos);
     if (!v.isList()) {
-        error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
+        error<TypeError>(
+            "expected a list but found %1%: %2%",
+            showType(v),
+            ValuePrinter(*this, v, errorPrintOptions)
+        ).withTrace(pos, errorCtx).debugThrow();
     }
 }
@@ -2,6 +2,7 @@
 #include "eval-settings.hh"
 #include "hash.hh"
 #include "primops.hh"
+#include "print-options.hh"
 #include "types.hh"
 #include "util.hh"
 #include "store-api.hh"
@@ -20,6 +21,9 @@
 #include "gc-small-vector.hh"
 #include "url.hh"
 #include "fetch-to-store.hh"
+#include "tarball.hh"
+#include "flake/flakeref.hh"
+#include "parser-tab.hh"
 
 #include <algorithm>
 #include <chrono>
@@ -29,9 +33,9 @@
 #include <unistd.h>
 #include <sys/time.h>
 #include <sys/resource.h>
-#include <iostream>
 #include <fstream>
 #include <functional>
+#include <iostream>
 
 #include <sys/resource.h>
 #include <nlohmann/json.hpp>
@@ -335,46 +339,6 @@ void initGC()
     gcInitialised = true;
 }
 
 
-ErrorBuilder & ErrorBuilder::atPos(PosIdx pos)
-{
-    info.errPos = state.positions[pos];
-    return *this;
-}
-
-ErrorBuilder & ErrorBuilder::withTrace(PosIdx pos, const std::string_view text)
-{
-    info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = false });
-    return *this;
-}
-
-ErrorBuilder & ErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text)
-{
-    info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = true });
-    return *this;
-}
-
-ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s)
-{
-    info.suggestions = s;
-    return *this;
-}
-
-ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr)
-{
-    // NOTE: This is abusing side-effects.
-    // TODO: check compatibility with nested debugger calls.
-    state.debugTraces.push_front(DebugTrace {
-        .pos = nullptr,
-        .expr = expr,
-        .env = env,
-        .hint = hintformat("Fake frame for debugging purposes"),
-        .isError = true
-    });
-    return *this;
-}
-
 
 EvalState::EvalState(
     const SearchPath & _searchPath,
     ref<Store> store,
@@ -416,18 +380,28 @@ EvalState::EvalState(
     , sPath(symbols.create("path"))
     , sPrefix(symbols.create("prefix"))
     , sOutputSpecified(symbols.create("outputSpecified"))
+    , exprSymbols{
+        .sub = symbols.create("__sub"),
+        .lessThan = symbols.create("__lessThan"),
+        .mul = symbols.create("__mul"),
+        .div = symbols.create("__div"),
+        .or_ = symbols.create("or"),
+        .findFile = symbols.create("__findFile"),
+        .nixPath = symbols.create("__nixPath"),
+        .body = symbols.create("body"),
+    }
     , repair(NoRepair)
     , emptyBindings(0)
     , rootFS(
         evalSettings.restrictEval || evalSettings.pureEval
-        ? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {},
+        ? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(), {},
             [](const CanonPath & path) -> RestrictedPathError {
                 auto modeInformation = evalSettings.pureEval
                     ? "in pure evaluation mode (use '--impure' to override)"
                     : "in restricted mode";
                 throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
             }))
-        : makeFSInputAccessor(CanonPath::root))
+        : makeFSInputAccessor())
     , corepkgsFS(makeMemoryInputAccessor())
     , internalFS(makeMemoryInputAccessor())
     , derivationInternal{corepkgsFS->addFile(
@@ -730,7 +704,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env &
     if (se.up && env.up) {
         std::cout << "static: ";
         printStaticEnvBindings(st, se);
-        printWithBindings(st, env);
+        if (se.isWith)
+            printWithBindings(st, env);
         std::cout << std::endl;
         printEnvBindings(st, *se.up, *env.up, ++lvl);
     } else {
@@ -742,7 +717,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env &
             std::cout << st[i.first] << " ";
         std::cout << ANSI_NORMAL;
         std::cout << std::endl;
-        printWithBindings(st, env); // probably nothing there for the top level.
+        if (se.isWith)
+            printWithBindings(st, env); // probably nothing there for the top level.
         std::cout << std::endl;
 
     }
@@ -764,7 +740,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En
     if (env.up && se.up) {
         mapStaticEnvBindings(st, *se.up, *env.up, vm);
 
-        if (!env.values[0]->isThunk()) {
+        if (se.isWith && !env.values[0]->isThunk()) {
             // add 'with' bindings.
             Bindings::iterator j = env.values[0]->attrs->begin();
             while (j != env.values[0]->attrs->end()) {
@@ -797,7 +773,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
         ? std::make_unique<DebugTraceStacker>(
             *this,
             DebugTrace {
-                .pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()],
+                .pos = error->info().pos ? error->info().pos : positions[expr.getPos()],
                 .expr = expr,
                 .env = env,
                 .hint = error->info().msg,
@@ -807,12 +783,10 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
 
     if (error)
     {
-        printError("%s\n\n", error->what());
+        printError("%s\n", error->what());
 
         if (trylevel > 0 && error->info().level != lvlInfo)
             printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n");
-
-        printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL);
     }
 
     auto se = getStaticEnv(expr);
@@ -829,23 +803,23 @@ void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2)
 
 void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const
 {
-    e.addTrace(positions[pos], hintfmt(s, s2), frame);
+    e.addTrace(positions[pos], HintFmt(s, s2), frame);
 }
 
+template<typename... Args>
 static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
     EvalState & state,
     Expr & expr,
     Env & env,
     std::shared_ptr<Pos> && pos,
-    const char * s,
-    const std::string & s2)
+    const Args & ... formatArgs)
 {
     return std::make_unique<DebugTraceStacker>(state,
         DebugTrace {
             .pos = std::move(pos),
             .expr = expr,
             .env = env,
-            .hint = hintfmt(s, s2),
+            .hint = HintFmt(formatArgs...),
             .isError = false
         });
 }
@@ -916,7 +890,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
                 return j->value;
             }
             if (!fromWith->parentWith)
-                error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow<UndefinedVarError>();
+                error<UndefinedVarError>("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow();
             for (size_t l = fromWith->prevWith; l; --l, env = env->up) ;
             fromWith = fromWith->parentWith;
         }
@@ -1122,7 +1096,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
         // computation.
         if (mustBeTrivial &&
             !(dynamic_cast<ExprAttrs *>(e)))
-            error("file '%s' must be an attribute set", path).debugThrow<EvalError>();
+            error<EvalError>("file '%s' must be an attribute set", path).debugThrow();
         eval(e, v);
     } catch (Error & e) {
         addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string());
@@ -1153,7 +1127,11 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri
         Value v;
         e->eval(*this, env, v);
         if (v.type() != nBool)
-            error("value is %1% while a Boolean was expected", showType(v)).withFrame(env, *e).debugThrow<TypeError>();
+            error<TypeError>(
+                "expected a Boolean but found %1%: %2%",
+                showType(v),
+                ValuePrinter(*this, v, errorPrintOptions)
+            ).atPos(pos).withFrame(env, *e).debugThrow();
         return v.boolean;
     } catch (Error & e) {
         e.addTrace(positions[pos], errorCtx);
@@ -1167,7 +1145,11 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po
     try {
         e->eval(*this, env, v);
         if (v.type() != nAttrs)
-            error("value is %1% while a set was expected", showType(v)).withFrame(env, *e).debugThrow<TypeError>();
+            error<TypeError>(
+                "expected a set but found %1%: %2%",
+                showType(v),
+                ValuePrinter(*this, v, errorPrintOptions)
+            ).withFrame(env, *e).debugThrow();
     } catch (Error & e) {
         e.addTrace(positions[pos], errorCtx);
         throw;
@@ -1276,7 +1258,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
             auto nameSym = state.symbols.create(nameVal.string_view());
             Bindings::iterator j = v.attrs->find(nameSym);
             if (j != v.attrs->end())
-                state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow<EvalError>();
+                state.error<EvalError>("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow();
 
             i.valueExpr->setName(nameSym);
             /* Keep sorted order so find can catch duplicates */
@@ -1302,6 +1284,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
     for (auto & i : attrs->attrs)
         env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
 
+    auto dts = state.debugRepl
+        ? makeDebugTraceStacker(
+            state,
+            *this,
+            env2,
+            getPos()
+                ? std::make_shared<Pos>(state.positions[getPos()])
+                : nullptr,
+            "while evaluating a '%1%' expression",
+            "let"
+        )
+        : nullptr;
+
     body->eval(state, env2, v);
 }
@@ -1388,8 +1383,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
                     for (auto & attr : *vAttrs->attrs)
                         allAttrNames.insert(state.symbols[attr.name]);
                     auto suggestions = Suggestions::bestMatches(allAttrNames, state.symbols[name]);
-                    state.error("attribute '%1%' missing", state.symbols[name])
-                        .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow<EvalError>();
+                    state.error<EvalError>("attribute '%1%' missing", state.symbols[name])
+                        .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow();
                 }
             }
             vAttrs = j->value;
@@ -1462,7 +1457,7 @@ public:
 void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
 {
     if (callDepth > evalSettings.maxCallDepth)
-        error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow<EvalError>();
+        error<EvalError>("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow();
     CallDepth _level(callDepth);
 
     auto trace = evalSettings.traceFunctionCalls
@@ -1520,13 +1515,13 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                 auto j = args[0]->attrs->get(i.name);
                 if (!j) {
                     if (!i.def) {
-                        error("function '%1%' called without required argument '%2%'",
+                        error<TypeError>("function '%1%' called without required argument '%2%'",
                             (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"),
                             symbols[i.name])
                             .atPos(lambda.pos)
                             .withTrace(pos, "from call site")
                             .withFrame(*fun.lambda.env, lambda)
-                            .debugThrow<TypeError>();
+                            .debugThrow();
                     }
                     env2.values[displ++] = i.def->maybeThunk(*this, env2);
                 } else {
@@ -1546,14 +1541,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
                     for (auto & formal : lambda.formals->formals)
                         formalNames.insert(symbols[formal.name]);
                     auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]);
-                    error("function '%1%' called with unexpected argument '%2%'",
+                    error<TypeError>("function '%1%' called with unexpected argument '%2%'",
                         (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"),
                         symbols[i.name])
                         .atPos(lambda.pos)
                         .withTrace(pos, "from call site")
                         .withSuggestions(suggestions)
                         .withFrame(*fun.lambda.env, lambda)
-                        .debugThrow<TypeError>();
+                        .debugThrow();
                 }
                 abort(); // can't happen
             }
@@ -1685,7 +1680,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
     }
 
     else
-        error("attempt to call something which is not a function but %1%", showType(vCur)).atPos(pos).debugThrow<TypeError>();
+        error<TypeError>(
+            "attempt to call something which is not a function but %1%: %2%",
+            showType(vCur),
+            ValuePrinter(*this, vCur, errorPrintOptions))
+            .atPos(pos)
+            .debugThrow();
 
     vRes = vCur;
@@ -1694,6 +1694,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
 
 void ExprCall::eval(EvalState & state, Env & env, Value & v)
 {
+    auto dts = state.debugRepl
+        ? makeDebugTraceStacker(
+            state,
+            *this,
+            env,
+            getPos()
+                ? std::make_shared<Pos>(state.positions[getPos()])
+                : nullptr,
+            "while calling a function"
+        )
+        : nullptr;
+
     Value vFun;
     fun->eval(state, env, vFun);
 
@@ -1755,12 +1767,12 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
             if (j != args.end()) {
                 attrs.insert(*j);
             } else if (!i.def) {
-                error(R"(cannot evaluate a function that has an argument without a value ('%1%')
+                error<MissingArgumentError>(R"(cannot evaluate a function that has an argument without a value ('%1%')
 Nix attempted to evaluate a function as a top level expression; in
 this case it must have its arguments supplied either by default
 values, or passed explicitly with '--arg' or '--argstr'. See
 https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name])
-                    .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow<MissingArgumentError>();
+                    .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow();
             }
         }
     }
@@ -1791,7 +1803,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)
     if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) {
         std::ostringstream out;
         cond->show(state.symbols, out);
-        state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow<AssertionError>();
+        state.error<AssertionError>("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow();
     }
     body->eval(state, env, v);
 }
@@ -1969,14 +1981,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
                 nf = n;
                 nf += vTmp.fpoint;
             } else
-                state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow<EvalError>();
+                state.error<EvalError>("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow();
         } else if (firstType == nFloat) {
             if (vTmp.type() == nInt) {
                 nf += vTmp.integer;
             } else if (vTmp.type() == nFloat) {
                 nf += vTmp.fpoint;
             } else
-                state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow<EvalError>();
+                state.error<EvalError>("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow();
         } else {
             if (s.empty()) s.reserve(es->size());
             /* skip canonization of first path, which would only be not
@@ -1998,7 +2010,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
         v.mkFloat(nf);
     else if (firstType == nPath) {
         if (!context.empty())
-            state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow<EvalError>();
+            state.error<EvalError>("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow();
         v.mkPath(state.rootPath(CanonPath(canonPath(str()))));
     } else
         v.mkStringMove(c_str(), context);
@@ -2013,8 +2025,9 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v)
 
 void ExprBlackHole::eval(EvalState & state, Env & env, Value & v)
 {
-    state.error("infinite recursion encountered")
-        .debugThrow<InfiniteRecursionError>();
+    state.error<InfiniteRecursionError>("infinite recursion encountered")
+        .atPos(v.determinePos(noPos))
+        .debugThrow();
 }
 
 // always force this to be separate, otherwise forceValue may inline it and take
@@ -2028,7 +2041,7 @@ void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos)
     try {
         std::rethrow_exception(e);
     } catch (InfiniteRecursionError & e) {
-        e.err.errPos = positions[pos];
+        e.atPos(positions[pos]);
     } catch (...) {
     }
 }
@@ -2076,12 +2089,18 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt
     try {
         forceValue(v, pos);
         if (v.type() != nInt)
-            error("value is %1% while an integer was expected", showType(v)).debugThrow<TypeError>();
+            error<TypeError>(
+                "expected an integer but found %1%: %2%",
+                showType(v),
+                ValuePrinter(*this, v, errorPrintOptions)
+            ).atPos(pos).debugThrow();
         return v.integer;
     } catch (Error & e) {
         e.addTrace(positions[pos], errorCtx);
         throw;
     }
 
+    return v.integer;
 }
@@ -2092,7 +2111,11 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err
         if (v.type() == nInt)
             return v.integer;
         else if (v.type() != nFloat)
-            error("value is %1% while a float was expected", showType(v)).debugThrow<TypeError>();
+            error<TypeError>(
+                "expected a float but found %1%: %2%",
+                showType(v),
+                ValuePrinter(*this, v, errorPrintOptions)
+            ).atPos(pos).debugThrow();
         return v.fpoint;
     } catch (Error & e) {
         e.addTrace(positions[pos], errorCtx);
|
@ -2106,12 +2129,18 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx
|
||||||
try {
|
try {
|
||||||
forceValue(v, pos);
|
forceValue(v, pos);
|
||||||
if (v.type() != nBool)
|
if (v.type() != nBool)
|
||||||
error("value is %1% while a Boolean was expected", showType(v)).debugThrow<TypeError>();
|
error<TypeError>(
|
||||||
|
"expected a Boolean but found %1%: %2%",
|
||||||
|
showType(v),
|
||||||
|
ValuePrinter(*this, v, errorPrintOptions)
|
||||||
|
).atPos(pos).debugThrow();
|
||||||
return v.boolean;
|
return v.boolean;
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addTrace(positions[pos], errorCtx);
|
e.addTrace(positions[pos], errorCtx);
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return v.boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -2126,7 +2155,11 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro
|
||||||
try {
|
try {
|
||||||
forceValue(v, pos);
|
forceValue(v, pos);
|
||||||
if (v.type() != nFunction && !isFunctor(v))
|
if (v.type() != nFunction && !isFunctor(v))
|
||||||
error("value is %1% while a function was expected", showType(v)).debugThrow<TypeError>();
|
error<TypeError>(
|
||||||
|
"expected a function but found %1%: %2%",
|
||||||
|
showType(v),
|
||||||
|
ValuePrinter(*this, v, errorPrintOptions)
|
||||||
|
).atPos(pos).debugThrow();
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addTrace(positions[pos], errorCtx);
|
e.addTrace(positions[pos], errorCtx);
|
||||||
throw;
|
throw;
|
||||||
|
@@ -2139,7 +2172,11 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
     try {
         forceValue(v, pos);
         if (v.type() != nString)
-            error("value is %1% while a string was expected", showType(v)).debugThrow<TypeError>();
+            error<TypeError>(
+                "expected a string but found %1%: %2%",
+                showType(v),
+                ValuePrinter(*this, v, errorPrintOptions)
+            ).atPos(pos).debugThrow();
         return v.string_view();
     } catch (Error & e) {
         e.addTrace(positions[pos], errorCtx);
@@ -2168,7 +2205,7 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s
 {
     auto s = forceString(v, pos, errorCtx);
     if (v.context()) {
-        error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
+        error<EvalError>("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow();
     }
     return s;
 }
@@ -2233,9 +2270,13 @@ BackedStringView EvalState::coerceToString(
                 return std::move(*maybeString);
             auto i = v.attrs->find(sOutPath);
             if (i == v.attrs->end()) {
-                error("cannot coerce %1% to a string", showType(v))
+                error<TypeError>(
+                    "cannot coerce %1% to a string: %2%",
+                    showType(v),
+                    ValuePrinter(*this, v, errorPrintOptions)
+                )
                     .withTrace(pos, errorCtx)
-                    .debugThrow<TypeError>();
+                    .debugThrow();
             }
             return coerceToString(pos, *i->value, context, errorCtx,
                 coerceMore, copyToStore, canonicalizePath);
@@ -2243,7 +2284,7 @@ BackedStringView EvalState::coerceToString(
 
     if (v.type() == nExternal) {
         try {
-            return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore);
+            return v.external->coerceToString(*this, pos, context, coerceMore, copyToStore);
         } catch (Error & e) {
             e.addTrace(nullptr, errorCtx);
             throw;
@@ -2279,23 +2320,26 @@ BackedStringView EvalState::coerceToString(
         }
     }
 
-    error("cannot coerce %1% to a string", showType(v))
+    error<TypeError>("cannot coerce %1% to a string: %2%",
+        showType(v),
+        ValuePrinter(*this, v, errorPrintOptions)
+    )
         .withTrace(pos, errorCtx)
-        .debugThrow<TypeError>();
+        .debugThrow();
 }
 
 
 StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path)
 {
     if (nix::isDerivation(path.path.abs()))
-        error("file names are not allowed to end in '%1%'", drvExtension).debugThrow<EvalError>();
+        error<EvalError>("file names are not allowed to end in '%1%'", drvExtension).debugThrow();
 
     auto i = srcToStore.find(path);
 
     auto dstPath = i != srcToStore.end()
         ? i->second
         : [&]() {
-            auto dstPath = fetchToStore(*store, path, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
+            auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
             allowPath(dstPath);
             srcToStore.insert_or_assign(path, dstPath);
             printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
@@ -2337,7 +2381,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext
        relative to the root filesystem. */
     auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
     if (path == "" || path[0] != '/')
-        error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
+        error<EvalError>("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow();
     return rootPath(CanonPath(path));
 }
 
@@ -2347,7 +2391,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon
     auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
     if (auto storePath = store->maybeParseStorePath(path))
         return *storePath;
-    error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
+    error<EvalError>("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow();
 }
 
 
@@ -2357,18 +2401,18 @@ std::pair<SingleDerivedPath, std::string_view> EvalState::coerceToSingleDerivedP
     auto s = forceString(v, context, pos, errorCtx);
     auto csize = context.size();
     if (csize != 1)
-        error(
+        error<EvalError>(
             "string '%s' has %d entries in its context. It should only have exactly one entry",
             s, csize)
-            .withTrace(pos, errorCtx).debugThrow<EvalError>();
+            .withTrace(pos, errorCtx).debugThrow();
     auto derivedPath = std::visit(overloaded {
         [&](NixStringContextElem::Opaque && o) -> SingleDerivedPath {
             return std::move(o);
         },
         [&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath {
-            error(
+            error<EvalError>(
                 "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time",
-                s).withTrace(pos, errorCtx).debugThrow<EvalError>();
+                s).withTrace(pos, errorCtx).debugThrow();
         },
         [&](NixStringContextElem::Built && b) -> SingleDerivedPath {
             return std::move(b);
@@ -2391,16 +2435,16 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value &
            error message. */
         std::visit(overloaded {
             [&](const SingleDerivedPath::Opaque & o) {
-                error(
+                error<EvalError>(
                     "path string '%s' has context with the different path '%s'",
                     s, sExpected)
-                    .withTrace(pos, errorCtx).debugThrow<EvalError>();
+                    .withTrace(pos, errorCtx).debugThrow();
             },
             [&](const SingleDerivedPath::Built & b) {
-                error(
+                error<EvalError>(
                     "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'",
                     s, b.output, b.drvPath->to_string(*store), sExpected)
-                    .withTrace(pos, errorCtx).debugThrow<EvalError>();
+                    .withTrace(pos, errorCtx).debugThrow();
             }
         }, derivedPath.raw());
 }
@@ -2485,7 +2529,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
 
         case nThunk: // Must not be left by forceValue
         default:
-            error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow<EvalError>();
+            error<EvalError>("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow();
     }
 }
 
@@ -2636,11 +2680,187 @@ void EvalState::printStatistics()
 }
 
 
-std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
+SourcePath resolveExprPath(SourcePath path)
 {
-    throw TypeError({
-        .msg = hintfmt("cannot coerce %1% to a string", showType())
-    });
+    unsigned int followCount = 0, maxFollow = 1024;
+
+    /* If `path' is a symlink, follow it. This is so that relative
+       path references work. */
+    while (!path.path.isRoot()) {
+        // Basic cycle/depth limit to avoid infinite loops.
+        if (++followCount >= maxFollow)
+            throw Error("too many symbolic links encountered while traversing the path '%s'", path);
+        auto p = path.parent().resolveSymlinks() / path.baseName();
+        if (p.lstat().type != InputAccessor::tSymlink) break;
+        path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
+    }
+
+    /* If `path' refers to a directory, append `/default.nix'. */
+    if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
+        return path / "default.nix";
+
+    return path;
+}
+
+
+Expr * EvalState::parseExprFromFile(const SourcePath & path)
+{
+    return parseExprFromFile(path, staticBaseEnv);
+}
+
+
+Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
+{
+    auto buffer = path.resolveSymlinks().readFile();
+    // readFile hopefully have left some extra space for terminators
+    buffer.append("\0\0", 2);
+    return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
+}
+
+
+Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
+{
+    auto s = make_ref<std::string>(std::move(s_));
+    s->append("\0\0", 2);
+    return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
+}
+
+
+Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
+{
+    return parseExprFromString(std::move(s), basePath, staticBaseEnv);
+}
+
+
+Expr * EvalState::parseStdin()
+{
+    //Activity act(*logger, lvlTalkative, "parsing standard input");
+    auto buffer = drainFD(0);
+    // drainFD should have left some extra space for terminators
+    buffer.append("\0\0", 2);
+    auto s = make_ref<std::string>(std::move(buffer));
+    return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
+}
+
+
+SourcePath EvalState::findFile(const std::string_view path)
+{
+    return findFile(searchPath, path);
+}
+
+
+SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
+{
+    for (auto & i : searchPath.elements) {
+        auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
+
+        if (!suffixOpt) continue;
+        auto suffix = *suffixOpt;
+
+        auto rOpt = resolveSearchPathPath(i.path);
+        if (!rOpt) continue;
+        auto r = *rOpt;
+
+        Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
+        if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
+    }
+
+    if (hasPrefix(path, "nix/"))
+        return {corepkgsFS, CanonPath(path.substr(3))};
+
+    error<ThrownError>(
+        evalSettings.pureEval
+            ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
+            : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
+        path
+    ).atPos(pos).debugThrow();
+}
+
+
+std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
+{
+    auto & value = value0.s;
+    auto i = searchPathResolved.find(value);
+    if (i != searchPathResolved.end()) return i->second;
+
+    std::optional<std::string> res;
+
+    if (EvalSettings::isPseudoUrl(value)) {
+        try {
+            auto storePath = fetchers::downloadTarball(
+                store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
+            res = { store->toRealPath(storePath) };
+        } catch (FileTransferError & e) {
+            logWarning({
+                .msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
+            });
+        }
+    }
+
+    else if (hasPrefix(value, "flake:")) {
+        experimentalFeatureSettings.require(Xp::Flakes);
+        auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
+        debug("fetching flake search path element '%s''", value);
+        auto storePath = flakeRef.resolve(store).fetchTree(store).first;
+        res = { store->toRealPath(storePath) };
+    }
+
+    else {
+        auto path = absPath(value);
+
+        /* Allow access to paths in the search path. */
+        if (initAccessControl) {
+            allowPath(path);
+            if (store->isInStore(path)) {
+                try {
+                    StorePathSet closure;
+                    store->computeFSClosure(store->toStorePath(path).first, closure);
+                    for (auto & p : closure)
+                        allowPath(p);
+                } catch (InvalidPath &) { }
+            }
+        }
+
+        if (pathExists(path))
+            res = { path };
+        else {
+            logWarning({
+                .msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value)
+            });
+            res = std::nullopt;
+        }
+    }
+
+    if (res)
+        debug("resolved search path element '%s' to '%s'", value, *res);
+    else
+        debug("failed to resolve search path element '%s'", value);
+
+    searchPathResolved.emplace(value, res);
+    return res;
+}
+
+
+Expr * EvalState::parse(
+    char * text,
+    size_t length,
+    Pos::Origin origin,
+    const SourcePath & basePath,
+    std::shared_ptr<StaticEnv> & staticEnv)
+{
+    auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS, exprSymbols);
+
+    result->bindVars(*this, staticEnv);
+
+    return result;
+}
+
+
+std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
+{
+    state.error<TypeError>(
+        "cannot coerce %1% to a string: %2%", showType(), *this
+    ).atPos(pos).debugThrow();
 }
 
 
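The parser entry points above (`parseExprFromFile`, `parseExprFromString`, `parseStdin`) each append NUL terminators for the lexer and then hand the buffer to `EvalState::parse`, which binds variables against a static environment before returning. A rough usage sketch, assuming an already-constructed `EvalState` named `state` (the expression text is illustrative only):

    // Sketch: parse a string relative to the filesystem root and evaluate it.
    Expr * e = state.parseExprFromString("{ x = 1; }.x", state.rootPath(CanonPath::root));
    Value v;
    state.eval(e, v);   // v should now hold the integer 1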
@@ -2,6 +2,7 @@
 ///@file
 
 #include "attr-set.hh"
+#include "eval-error.hh"
 #include "types.hh"
 #include "value.hh"
 #include "nixexpr.hh"
@@ -147,49 +148,10 @@ struct DebugTrace {
     std::shared_ptr<Pos> pos;
     const Expr & expr;
     const Env & env;
-    hintformat hint;
+    HintFmt hint;
     bool isError;
 };
 
-void debugError(Error * e, Env & env, Expr & expr);
-
-class ErrorBuilder
-{
-private:
-    EvalState & state;
-    ErrorInfo info;
-
-    ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { }
-
-public:
-    template<typename... Args>
-    [[nodiscard, gnu::noinline]]
-    static ErrorBuilder * create(EvalState & s, const Args & ... args)
-    {
-        return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) });
-    }
-
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & atPos(PosIdx pos);
-
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withTrace(PosIdx pos, const std::string_view text);
-
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text);
-
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withSuggestions(Suggestions & s);
-
-    [[nodiscard, gnu::noinline]]
-    ErrorBuilder & withFrame(const Env & e, const Expr & ex);
-
-    template<class ErrorType>
-    [[gnu::noinline, gnu::noreturn]]
-    void debugThrow();
-};
-
-
 class EvalState : public std::enable_shared_from_this<EvalState>
 {
 public:
@@ -207,6 +169,8 @@ public:
         sPrefix,
         sOutputSpecified;
 
+    const Expr::AstSymbols exprSymbols;
+
     /**
      * If set, force copying files to the Nix store even if they
      * already exist there.
@@ -272,39 +236,11 @@ public:
 
     void runDebugRepl(const Error * error, const Env & env, const Expr & expr);
 
-    template<class E>
-    [[gnu::noinline, gnu::noreturn]]
-    void debugThrowLastTrace(E && error)
-    {
-        debugThrow(error, nullptr, nullptr);
-    }
-
-    template<class E>
-    [[gnu::noinline, gnu::noreturn]]
-    void debugThrow(E && error, const Env * env, const Expr * expr)
-    {
-        if (debugRepl && ((env && expr) || !debugTraces.empty())) {
-            if (!env || !expr) {
-                const DebugTrace & last = debugTraces.front();
-                env = &last.env;
-                expr = &last.expr;
-            }
-            runDebugRepl(&error, *env, *expr);
-        }
-
-        throw std::move(error);
-    }
-
-    // This is dangerous, but gets in line with the idea that error creation and
-    // throwing should not allocate on the stack of hot functions.
-    // as long as errors are immediately thrown, it works.
-    ErrorBuilder * errorBuilder;
-
-    template<typename... Args>
+    template<class T, typename... Args>
     [[nodiscard, gnu::noinline]]
-    ErrorBuilder & error(const Args & ... args) {
-        errorBuilder = ErrorBuilder::create(*this, args...);
-        return *errorBuilder;
+    EvalErrorBuilder<T> & error(const Args & ... args) {
+        // `EvalErrorBuilder::debugThrow` performs the corresponding `delete`.
+        return *new EvalErrorBuilder<T>(*this, args...);
     }
 
 private:
@@ -370,6 +306,11 @@ public:
      */
     SourcePath rootPath(CanonPath path);
 
+    /**
+     * Variant which accepts relative paths too.
+     */
+    SourcePath rootPath(PathView path);
+
     /**
      * Allow access to a path.
      */
@@ -843,22 +784,6 @@ SourcePath resolveExprPath(SourcePath path);
  */
 bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
 
-struct InvalidPathError : EvalError
-{
-    Path path;
-    InvalidPathError(const Path & path);
-#ifdef EXCEPTION_NEEDS_THROW_SPEC
-    ~InvalidPathError() throw () { };
-#endif
-};
-
-template<class ErrorType>
-void ErrorBuilder::debugThrow()
-{
-    // NOTE: We always use the -LastTrace version as we push the new trace in withFrame()
-    state.debugThrowLastTrace(ErrorType(info));
-}
-
 }
 
 #include "eval-inline.hh"
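The call-site changes that follow (flake.cc, get-drvs.cc, and others) all apply the same mechanical rewrite against this factory. A before/after sketch of the pattern, with a placeholder message string and variable name:

    // Before: plain throw, or debugThrow<TypeError>() on the old ErrorBuilder.
    throw TypeError("unexpected value '%s'", name);                      // 'name' is a placeholder
    // After: build the error through EvalState so the debugger can hook in,
    // and finish the chain with debugThrow().
    state.error<TypeError>("unexpected value '%s'", name).debugThrow();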
@ -147,15 +147,15 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
NixStringContext emptyContext = {};
|
NixStringContext emptyContext = {};
|
||||||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
|
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
|
||||||
} else
|
} else
|
||||||
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
state.error<TypeError>("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||||
state.symbols[attr.name], showType(*attr.value));
|
state.symbols[attr.name], showType(*attr.value)).debugThrow();
|
||||||
}
|
}
|
||||||
#pragma GCC diagnostic pop
|
#pragma GCC diagnostic pop
|
||||||
}
|
}
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addTrace(
|
e.addTrace(
|
||||||
state.positions[attr.pos],
|
state.positions[attr.pos],
|
||||||
hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -164,7 +164,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
||||||
try {
|
try {
|
||||||
input.ref = FlakeRef::fromAttrs(attrs);
|
input.ref = FlakeRef::fromAttrs(attrs);
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
|
e.addTrace(state.positions[pos], HintFmt("while evaluating flake input"));
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
@ -295,15 +295,15 @@ static Flake getFlake(
|
||||||
std::vector<std::string> ss;
|
std::vector<std::string> ss;
|
||||||
for (auto elem : setting.value->listItems()) {
|
for (auto elem : setting.value->listItems()) {
|
||||||
if (elem->type() != nString)
|
if (elem->type() != nString)
|
||||||
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
|
state.error<TypeError>("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||||
state.symbols[setting.name], showType(*setting.value));
|
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||||
ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
|
ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
|
||||||
}
|
}
|
||||||
flake.config.settings.emplace(state.symbols[setting.name], ss);
|
flake.config.settings.emplace(state.symbols[setting.name], ss);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
throw TypeError("flake configuration setting '%s' is %s",
|
state.error<TypeError>("flake configuration setting '%s' is %s",
|
||||||
state.symbols[setting.name], showType(*setting.value));
|
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -865,11 +865,11 @@ static void prim_flakeRefToString(
             attrs.emplace(state.symbols[attr.name],
                 std::string(attr.value->string_view()));
         } else {
-            state.error(
+            state.error<EvalError>(
                 "flake reference attribute sets may only contain integers, Booleans, "
                 "and strings, but attribute '%s' is %s",
                 state.symbols[attr.name],
-                showType(*attr.value)).debugThrow<EvalError>();
+                showType(*attr.value)).debugThrow();
         }
     }
     auto flakeRef = FlakeRef::fromAttrs(attrs);
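Note: the hunks above all make the same conversion: a direct `throw TypeError(...)` becomes a call to the `EvalState::error<T>()` builder, which can carry a position and is routed through `debugThrow()` so the evaluator's debugger can stop at the failure. A minimal sketch of the pattern, using only the calls visible in this diff (the surrounding helper and its arguments are illustrative):

    #include "eval.hh"

    using namespace nix;

    // Illustrative helper: reject non-string values the same way the hunks above do.
    static void requireString(EvalState & state, Value & v, PosIdx pos)
    {
        if (v.type() != nString)
            // Old style: throw TypeError("expected a string, got %s", showType(v));
            state.error<TypeError>("expected a string, got %s", showType(v))
                .atPos(pos)
                .debugThrow();
    }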
@@ -49,7 +49,7 @@ std::string PackageInfo::queryName() const
 {
     if (name == "" && attrs) {
         auto i = attrs->find(state->sName);
-        if (i == attrs->end()) throw TypeError("derivation name missing");
+        if (i == attrs->end()) state->error<TypeError>("derivation name missing").debugThrow();
         name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation");
     }
     return name;
@@ -396,7 +396,8 @@ static void getDerivations(EvalState & state, Value & vIn,
         }
     }

-    else throw TypeError("expression does not evaluate to a derivation (or a set or list of those)");
+    else
+        state.error<TypeError>("expression does not evaluate to a derivation (or a set or list of those)").debugThrow();
 }

@@ -1,4 +1,6 @@
 #include "json-to-value.hh"
+#include "value.hh"
+#include "eval.hh"

 #include <variant>
 #include <nlohmann/json.hpp>
@@ -159,7 +161,7 @@ public:
     }

     bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) {
-        throw JSONParseError(ex.what());
+        throw JSONParseError("%s", ex.what());
     }
 };

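Note: the `parse_error` change above is about format-string hygiene rather than behaviour: the exception text from nlohmann::json is now passed as an argument to an explicit "%s" placeholder instead of being used as the format string itself, so a stray '%' in the message cannot be misread as a formatting directive. A small sketch of the difference (the message text is hypothetical):

    #include <string>
    #include "error.hh"

    namespace nix { MakeError(JSONParseError, Error); }

    void demo()
    {
        std::string msg = "unexpected '%' at byte 3"; // hypothetical parser message
        // Risky: msg is interpreted as a format string, and '%' breaks it.
        // throw nix::JSONParseError(msg);
        // Safe: msg is substituted into a fixed "%s" placeholder.
        throw nix::JSONParseError("%s", msg);
    }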
@@ -1,13 +1,16 @@
 #pragma once
 ///@file

-#include "eval.hh"
+#include "error.hh"

 #include <string>

 namespace nix {

-MakeError(JSONParseError, EvalError);
+class EvalState;
+struct Value;
+
+MakeError(JSONParseError, Error);

 void parseJSON(EvalState & state, const std::string_view & s, Value & v);

@@ -29,12 +29,7 @@ using namespace nix;

 namespace nix {

-static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
-{
-    return data->state.positions.add(data->origin, loc.first_line, loc.first_column);
-}
-
-#define CUR_POS makeCurPos(*yylloc, data)
+#define CUR_POS state->at(*yylloc)

 static void initLoc(YYLTYPE * loc)
 {
@@ -151,9 +146,9 @@ or { return OR_KW; }
                   try {
                       yylval->n = boost::lexical_cast<int64_t>(yytext);
                   } catch (const boost::bad_lexical_cast &) {
-                      throw ParseError({
-                          .msg = hintfmt("invalid integer '%1%'", yytext),
-                          .errPos = data->state.positions[CUR_POS],
+                      throw ParseError(ErrorInfo{
+                          .msg = HintFmt("invalid integer '%1%'", yytext),
+                          .pos = state->positions[CUR_POS],
                       });
                   }
                   return INT_LIT;
@@ -161,9 +156,9 @@ or { return OR_KW; }
 {FLOAT}         { errno = 0;
                   yylval->nf = strtod(yytext, 0);
                   if (errno != 0)
-                      throw ParseError({
-                          .msg = hintfmt("invalid float '%1%'", yytext),
-                          .errPos = data->state.positions[CUR_POS],
+                      throw ParseError(ErrorInfo{
+                          .msg = HintFmt("invalid float '%1%'", yytext),
+                          .pos = state->positions[CUR_POS],
                       });
                   return FLOAT_LIT;
                 }
@@ -186,7 +181,7 @@ or { return OR_KW; }
                   /* It is impossible to match strings ending with '$' with one
                      regex because trailing contexts are only valid at the end
                      of a rule. (A sane but undocumented limitation.) */
-                  yylval->str = unescapeStr(data->symbols, yytext, yyleng);
+                  yylval->str = unescapeStr(state->symbols, yytext, yyleng);
                   return STR;
                 }
 <STRING>\$\{  { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
@@ -214,7 +209,7 @@ or { return OR_KW; }
                   return IND_STR;
                 }
 <IND_STRING>\'\'\\{ANY} {
-                  yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
+                  yylval->str = unescapeStr(state->symbols, yytext + 2, yyleng - 2);
                   return IND_STR;
                 }
 <IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
@@ -290,9 +285,9 @@ or { return OR_KW; }

 <INPATH_SLASH>{ANY} |
 <INPATH_SLASH><<EOF>> {
-                  throw ParseError({
-                      .msg = hintfmt("path has a trailing slash"),
-                      .errPos = data->state.positions[CUR_POS],
+                  throw ParseError(ErrorInfo{
+                      .msg = HintFmt("path has a trailing slash"),
+                      .pos = state->positions[CUR_POS],
                   });
                 }

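Note: the lexer hunks above all rewrite ParseError construction in the same way: the brace-initialised ErrorInfo now uses HintFmt for `.msg` and a `.pos` field (instead of `.errPos`), and positions come from the ParserState rather than from the old ParseData. A sketch of the shape, using only names taken from the changed lines (the wrapper function itself is illustrative):

    // Illustrative wrapper around the construction used in the rules above.
    [[noreturn]] static void badInteger(ParserState * state, const char * text, PosIdx pos)
    {
        throw ParseError(ErrorInfo{
            .msg = HintFmt("invalid integer '%1%'", text),
            .pos = state->positions[pos],
        });
    }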
@@ -9,6 +9,8 @@

 namespace nix {

+unsigned long Expr::nrExprs = 0;
+
 ExprBlackHole eBlackHole;

 // FIXME: remove, because *symbols* are abstract and do not have a single
@@ -294,10 +296,10 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
        enclosing `with'.  If there is no `with', then we can issue an
        "undefined variable" error now. */
     if (withLevel == -1)
-        throw UndefinedVarError({
-            .msg = hintfmt("undefined variable '%1%'", es.symbols[name]),
-            .errPos = es.positions[pos]
-        });
+        es.error<UndefinedVarError>(
+            "undefined variable '%1%'",
+            es.symbols[name]
+        ).atPos(pos).debugThrow();
     for (auto * e = env.get(); e && !fromWith; e = e->up)
         fromWith = e->isWith;
     this->level = withLevel;
@@ -407,9 +409,6 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &

 void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
 {
-    if (es.debugRepl)
-        es.exprEnvs.insert(std::make_pair(this, env));
-
     auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs->attrs.size());

     Displacement displ = 0;
@@ -421,6 +420,9 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
     for (auto & i : attrs->attrs)
         i.second.e->bindVars(es, i.second.inherited ? env : newEnv);

+    if (es.debugRepl)
+        es.exprEnvs.insert(std::make_pair(this, newEnv));
+
     body->bindVars(es, newEnv);
 }

@@ -445,9 +447,6 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
             break;
         }

-    if (es.debugRepl)
-        es.exprEnvs.insert(std::make_pair(this, env));
-
     attrs->bindVars(es, env);
     auto newEnv = std::make_shared<StaticEnv>(this, env.get());
     body->bindVars(es, newEnv);
@@ -9,110 +9,13 @@
 #include "error.hh"
 #include "chunked-vector.hh"
 #include "position.hh"
+#include "eval-error.hh"
+#include "pos-idx.hh"
+#include "pos-table.hh"

 namespace nix {

-
-MakeError(EvalError, Error);
-MakeError(ParseError, Error);
-MakeError(AssertionError, EvalError);
-MakeError(ThrownError, AssertionError);
-MakeError(Abort, EvalError);
-MakeError(TypeError, EvalError);
-MakeError(UndefinedVarError, Error);
-MakeError(MissingArgumentError, EvalError);
-
-class InfiniteRecursionError : public EvalError
-{
-    friend class EvalState;
-public:
-    using EvalError::EvalError;
-};
-
-class PosIdx {
-    friend class PosTable;
-
-private:
-    uint32_t id;
-
-    explicit PosIdx(uint32_t id): id(id) {}
-
-public:
-    PosIdx() : id(0) {}
-
-    explicit operator bool() const { return id > 0; }
-
-    bool operator <(const PosIdx other) const { return id < other.id; }
-
-    bool operator ==(const PosIdx other) const { return id == other.id; }
-
-    bool operator !=(const PosIdx other) const { return id != other.id; }
-};
-
-class PosTable
-{
-public:
-    class Origin {
-        friend PosTable;
-    private:
-        // must always be invalid by default, add() replaces this with the actual value.
-        // subsequent add() calls use this index as a token to quickly check whether the
-        // current origins.back() can be reused or not.
-        mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
-
-        // Used for searching in PosTable::[].
-        explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {}
-
-    public:
-        const Pos::Origin origin;
-
-        Origin(Pos::Origin origin): origin(origin) {}
-    };
-
-    struct Offset {
-        uint32_t line, column;
-    };
-
-private:
-    std::vector<Origin> origins;
-    ChunkedVector<Offset, 8192> offsets;
-
-public:
-    PosTable(): offsets(1024)
-    {
-        origins.reserve(1024);
-    }
-
-    PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
-    {
-        const auto idx = offsets.add({line, column}).second;
-        if (origins.empty() || origins.back().idx != origin.idx) {
-            origin.idx = idx;
-            origins.push_back(origin);
-        }
-        return PosIdx(idx + 1);
-    }
-
-    Pos operator[](PosIdx p) const
-    {
-        if (p.id == 0 || p.id > offsets.size())
-            return {};
-        const auto idx = p.id - 1;
-        /* we want the last key <= idx, so we'll take prev(first key > idx).
-           this is guaranteed to never rewind origin.begin because the first
-           key is always 0. */
-        const auto pastOrigin = std::upper_bound(
-            origins.begin(), origins.end(), Origin(idx),
-            [] (const auto & a, const auto & b) { return a.idx < b.idx; });
-        const auto origin = *std::prev(pastOrigin);
-        const auto offset = offsets[idx];
-        return {offset.line, offset.column, origin.origin};
-    }
-};
-
-inline PosIdx noPos = {};
-
-
 struct Env;
 struct Value;
 class EvalState;
@@ -140,6 +43,11 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)

 struct Expr
 {
+    struct AstSymbols {
+        Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body;
+    };
+
+
     static unsigned long nrExprs;
     Expr() {
         nrExprs++;
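Note: `Expr::AstSymbols` introduced above is a set of symbols interned once up front (`__sub`, `__lessThan`, and so on); the parser.y hunks further down replace repeated `symbols.create("__sub")` calls in grammar actions with `state->s.sub`. A sketch of the before/after, based on lines visible in this diff (the helper function itself is illustrative):

    // Illustrative: building the call node for `a - b` in a grammar action.
    Expr * makeSub(ParserState * state, PosIdx pos, Expr * a, Expr * b)
    {
        // Before: new ExprVar(state->symbols.create("__sub"))  -- interns on every use
        // After:  the symbol is pre-interned in Expr::AstSymbols and reused.
        return new ExprCall(pos, new ExprVar(state->s.sub), {a, b});
    }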
271 src/libexpr/parser-state.hh Normal file
@@ -0,0 +1,271 @@
+#pragma once
+///@file
+
+#include "eval.hh"
+
+namespace nix {
+
+/**
+ * @note Storing a C-style `char *` and `size_t` allows us to avoid
+ * having to define the special members that using string_view here
+ * would implicitly delete.
+ */
+struct StringToken
+{
+    const char * p;
+    size_t l;
+    bool hasIndentation;
+    operator std::string_view() const { return {p, l}; }
+};
+
+struct ParserLocation
+{
+    int first_line, first_column;
+    int last_line, last_column;
+
+    // backup to recover from yyless(0)
+    int stashed_first_line, stashed_first_column;
+    int stashed_last_line, stashed_last_column;
+
+    void stash() {
+        stashed_first_line = first_line;
+        stashed_first_column = first_column;
+        stashed_last_line = last_line;
+        stashed_last_column = last_column;
+    }
+
+    void unstash() {
+        first_line = stashed_first_line;
+        first_column = stashed_first_column;
+        last_line = stashed_last_line;
+        last_column = stashed_last_column;
+    }
+};
+
+struct ParserState
+{
+    SymbolTable & symbols;
+    PosTable & positions;
+    Expr * result;
+    SourcePath basePath;
+    PosTable::Origin origin;
+    const ref<InputAccessor> rootFS;
+    const Expr::AstSymbols & s;
+
+    void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
+    void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);
+    void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos);
+    Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {});
+    Expr * stripIndentation(const PosIdx pos,
+        std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es);
+    PosIdx at(const ParserLocation & loc);
+};
+
+inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
+{
+    throw ParseError({
+        .msg = HintFmt("attribute '%1%' already defined at %2%",
+            showAttrPath(symbols, attrPath), positions[prevPos]),
+        .pos = positions[pos]
+    });
+}
+
+inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos)
+{
+    throw ParseError({
+        .msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]),
+        .pos = positions[pos]
+    });
+}
+
+inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos)
+{
+    AttrPath::iterator i;
+    // All attrpaths have at least one attr
+    assert(!attrPath.empty());
+    // Checking attrPath validity.
+    // ===========================
+    for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
+        if (i->symbol) {
+            ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
+            if (j != attrs->attrs.end()) {
+                if (!j->second.inherited) {
+                    ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
+                    if (!attrs2) dupAttr(attrPath, pos, j->second.pos);
+                    attrs = attrs2;
+                } else
+                    dupAttr(attrPath, pos, j->second.pos);
+            } else {
+                ExprAttrs * nested = new ExprAttrs;
+                attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos);
+                attrs = nested;
+            }
+        } else {
+            ExprAttrs *nested = new ExprAttrs;
+            attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos));
+            attrs = nested;
+        }
+    }
+    // Expr insertion.
+    // ==========================
+    if (i->symbol) {
+        ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
+        if (j != attrs->attrs.end()) {
+            // This attr path is already defined. However, if both
+            // e and the expr pointed by the attr path are two attribute sets,
+            // we want to merge them.
+            // Otherwise, throw an error.
+            auto ae = dynamic_cast<ExprAttrs *>(e);
+            auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
+            if (jAttrs && ae) {
+                for (auto & ad : ae->attrs) {
+                    auto j2 = jAttrs->attrs.find(ad.first);
+                    if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
+                        dupAttr(ad.first, j2->second.pos, ad.second.pos);
+                    jAttrs->attrs.emplace(ad.first, ad.second);
+                }
+                jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
+            } else {
+                dupAttr(attrPath, pos, j->second.pos);
+            }
+        } else {
+            // This attr path is not defined. Let's create it.
+            attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
+            e->setName(i->symbol);
+        }
+    } else {
+        attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
+    }
+}
+
+inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg)
+{
+    std::sort(formals->formals.begin(), formals->formals.end(),
+        [] (const auto & a, const auto & b) {
+            return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
+        });
+
+    std::optional<std::pair<Symbol, PosIdx>> duplicate;
+    for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
+        if (formals->formals[i].name != formals->formals[i + 1].name)
+            continue;
+        std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
+        duplicate = std::min(thisDup, duplicate.value_or(thisDup));
+    }
+    if (duplicate)
+        throw ParseError({
+            .msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]),
+            .pos = positions[duplicate->second]
+        });
+
+    if (arg && formals->has(arg))
+        throw ParseError({
+            .msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]),
+            .pos = positions[pos]
+        });
+
+    return formals;
+}
+
+inline Expr * ParserState::stripIndentation(const PosIdx pos,
+    std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
+{
+    if (es.empty()) return new ExprString("");
+
+    /* Figure out the minimum indentation. Note that by design
+       whitespace-only final lines are not taken into account. (So
+       the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */
+    bool atStartOfLine = true; /* = seen only whitespace in the current line */
+    size_t minIndent = 1000000;
+    size_t curIndent = 0;
+    for (auto & [i_pos, i] : es) {
+        auto * str = std::get_if<StringToken>(&i);
+        if (!str || !str->hasIndentation) {
+            /* Anti-quotations and escaped characters end the current start-of-line whitespace. */
+            if (atStartOfLine) {
+                atStartOfLine = false;
+                if (curIndent < minIndent) minIndent = curIndent;
+            }
+            continue;
+        }
+        for (size_t j = 0; j < str->l; ++j) {
+            if (atStartOfLine) {
+                if (str->p[j] == ' ')
+                    curIndent++;
+                else if (str->p[j] == '\n') {
+                    /* Empty line, doesn't influence minimum
+                       indentation. */
+                    curIndent = 0;
+                } else {
+                    atStartOfLine = false;
+                    if (curIndent < minIndent) minIndent = curIndent;
+                }
+            } else if (str->p[j] == '\n') {
+                atStartOfLine = true;
+                curIndent = 0;
+            }
+        }
+    }
+
+    /* Strip spaces from each line. */
+    auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>;
+    atStartOfLine = true;
+    size_t curDropped = 0;
+    size_t n = es.size();
+    auto i = es.begin();
+    const auto trimExpr = [&] (Expr * e) {
+        atStartOfLine = false;
+        curDropped = 0;
+        es2->emplace_back(i->first, e);
+    };
+    const auto trimString = [&] (const StringToken & t) {
+        std::string s2;
+        for (size_t j = 0; j < t.l; ++j) {
+            if (atStartOfLine) {
+                if (t.p[j] == ' ') {
+                    if (curDropped++ >= minIndent)
+                        s2 += t.p[j];
+                }
+                else if (t.p[j] == '\n') {
+                    curDropped = 0;
+                    s2 += t.p[j];
+                } else {
+                    atStartOfLine = false;
+                    curDropped = 0;
+                    s2 += t.p[j];
+                }
+            } else {
+                s2 += t.p[j];
+                if (t.p[j] == '\n') atStartOfLine = true;
+            }
+        }
+
+        /* Remove the last line if it is empty and consists only of
+           spaces. */
+        if (n == 1) {
+            std::string::size_type p = s2.find_last_of('\n');
+            if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
+                s2 = std::string(s2, 0, p + 1);
+        }
+
+        es2->emplace_back(i->first, new ExprString(std::move(s2)));
+    };
+    for (; i != es.end(); ++i, --n) {
+        std::visit(overloaded { trimExpr, trimString }, i->second);
+    }
+
+    /* If this is a single string, then don't do a concatenation. */
+    if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
+        auto *const result = (*es2)[0].second;
+        delete es2;
+        return result;
+    }
+    return new ExprConcatStrings(pos, true, es2);
+}
+
+inline PosIdx ParserState::at(const ParserLocation & loc)
+{
+    return positions.add(origin, loc.first_line, loc.first_column);
+}
+
+}
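Note: the core of `ParserState::stripIndentation` above is a two-pass scan: first compute the smallest leading-space run over the lines of an indented string (ignoring whitespace-only lines), then drop that many spaces from the start of every line. A simplified, self-contained analog of that idea (the real code works on lexer token fragments, not whole strings):

    #include <algorithm>
    #include <string>
    #include <vector>

    // Simplified analog: strip the common leading indentation from a list of lines.
    std::string stripIndent(const std::vector<std::string> & lines)
    {
        size_t minIndent = std::string::npos;
        for (auto & l : lines) {
            auto n = l.find_first_not_of(' ');
            if (n != std::string::npos)            // whitespace-only lines don't count
                minIndent = std::min(minIndent, n);
        }
        if (minIndent == std::string::npos) minIndent = 0;
        std::string out;
        for (auto & l : lines)
            out += (l.size() > minIndent ? l.substr(minIndent) : std::string()) + "\n";
        return out;
    }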
@@ -5,9 +5,9 @@
 %defines
 /* %no-lines */
 %parse-param { void * scanner }
-%parse-param { nix::ParseData * data }
+%parse-param { nix::ParserState * state }
 %lex-param { void * scanner }
-%lex-param { nix::ParseData * data }
+%lex-param { nix::ParserState * state }
 %expect 1
 %expect-rr 1

@@ -18,6 +18,7 @@

 #include <variant>

+#include "finally.hh"
 #include "util.hh"
 #include "users.hh"

@@ -25,63 +26,26 @@
 #include "eval.hh"
 #include "eval-settings.hh"
 #include "globals.hh"
+#include "parser-state.hh"
+
+#define YYLTYPE ::nix::ParserLocation
+#define YY_DECL int yylex \
+    (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state)

 namespace nix {

-#define YYLTYPE ::nix::ParserLocation
-struct ParserLocation
-{
-    int first_line, first_column;
-    int last_line, last_column;
-
-    // backup to recover from yyless(0)
-    int stashed_first_line, stashed_first_column;
-    int stashed_last_line, stashed_last_column;
-
-    void stash() {
-        stashed_first_line = first_line;
-        stashed_first_column = first_column;
-        stashed_last_line = last_line;
-        stashed_last_column = last_column;
-    }
-
-    void unstash() {
-        first_line = stashed_first_line;
-        first_column = stashed_first_column;
-        last_line = stashed_last_line;
-        last_column = stashed_last_column;
-    }
-};
-
-struct ParseData
-{
-    EvalState & state;
-    SymbolTable & symbols;
-    Expr * result;
-    SourcePath basePath;
-    PosTable::Origin origin;
-    std::optional<ErrorInfo> error;
-};
-
-struct ParserFormals {
-    std::vector<Formal> formals;
-    bool ellipsis = false;
-};
+Expr * parseExprFromBuf(
+    char * text,
+    size_t length,
+    Pos::Origin origin,
+    const SourcePath & basePath,
+    SymbolTable & symbols,
+    PosTable & positions,
+    const ref<InputAccessor> rootFS,
+    const Expr::AstSymbols & astSymbols);

 }

-// using C a struct allows us to avoid having to define the special
-// members that using string_view here would implicitly delete.
-struct StringToken {
-    const char * p;
-    size_t l;
-    bool hasIndentation;
-    operator std::string_view() const { return {p, l}; }
-};
-
-#define YY_DECL int yylex \
-    (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data)
-
 #endif

 }
@ -95,242 +59,17 @@ YY_DECL;
|
||||||
|
|
||||||
using namespace nix;
|
using namespace nix;
|
||||||
|
|
||||||
|
#define CUR_POS state->at(*yylocp)
|
||||||
namespace nix {
|
|
||||||
|
|
||||||
|
|
||||||
static void dupAttr(const EvalState & state, const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
|
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
|
||||||
{
|
{
|
||||||
throw ParseError({
|
throw ParseError({
|
||||||
.msg = hintfmt("attribute '%1%' already defined at %2%",
|
.msg = HintFmt(error),
|
||||||
showAttrPath(state.symbols, attrPath), state.positions[prevPos]),
|
.pos = state->positions[state->at(*loc)]
|
||||||
.errPos = state.positions[pos]
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, const PosIdx prevPos)
|
|
||||||
{
|
|
||||||
throw ParseError({
|
|
||||||
.msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static void addAttr(ExprAttrs * attrs, AttrPath && attrPath,
|
|
||||||
Expr * e, const PosIdx pos, const nix::EvalState & state)
|
|
||||||
{
|
|
||||||
AttrPath::iterator i;
|
|
||||||
// All attrpaths have at least one attr
|
|
||||||
assert(!attrPath.empty());
|
|
||||||
// Checking attrPath validity.
|
|
||||||
// ===========================
|
|
||||||
for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
|
|
||||||
if (i->symbol) {
|
|
||||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
|
||||||
if (j != attrs->attrs.end()) {
|
|
||||||
if (!j->second.inherited) {
|
|
||||||
ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
|
|
||||||
if (!attrs2) dupAttr(state, attrPath, pos, j->second.pos);
|
|
||||||
attrs = attrs2;
|
|
||||||
} else
|
|
||||||
dupAttr(state, attrPath, pos, j->second.pos);
|
|
||||||
} else {
|
|
||||||
ExprAttrs * nested = new ExprAttrs;
|
|
||||||
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos);
|
|
||||||
attrs = nested;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ExprAttrs *nested = new ExprAttrs;
|
|
||||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos));
|
|
||||||
attrs = nested;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Expr insertion.
|
|
||||||
// ==========================
|
|
||||||
if (i->symbol) {
|
|
||||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
|
||||||
if (j != attrs->attrs.end()) {
|
|
||||||
// This attr path is already defined. However, if both
|
|
||||||
// e and the expr pointed by the attr path are two attribute sets,
|
|
||||||
// we want to merge them.
|
|
||||||
// Otherwise, throw an error.
|
|
||||||
auto ae = dynamic_cast<ExprAttrs *>(e);
|
|
||||||
auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
|
|
||||||
if (jAttrs && ae) {
|
|
||||||
for (auto & ad : ae->attrs) {
|
|
||||||
auto j2 = jAttrs->attrs.find(ad.first);
|
|
||||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
|
||||||
dupAttr(state, ad.first, j2->second.pos, ad.second.pos);
|
|
||||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
|
||||||
}
|
|
||||||
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
|
|
||||||
} else {
|
|
||||||
dupAttr(state, attrPath, pos, j->second.pos);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// This attr path is not defined. Let's create it.
|
|
||||||
attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
|
|
||||||
e->setName(i->symbol);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static Formals * toFormals(ParseData & data, ParserFormals * formals,
|
|
||||||
PosIdx pos = noPos, Symbol arg = {})
|
|
||||||
{
|
|
||||||
std::sort(formals->formals.begin(), formals->formals.end(),
|
|
||||||
[] (const auto & a, const auto & b) {
|
|
||||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
|
||||||
});
|
|
||||||
|
|
||||||
std::optional<std::pair<Symbol, PosIdx>> duplicate;
|
|
||||||
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
|
||||||
if (formals->formals[i].name != formals->formals[i + 1].name)
|
|
||||||
continue;
|
|
||||||
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
|
|
||||||
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
|
||||||
}
|
|
||||||
if (duplicate)
|
|
||||||
throw ParseError({
|
|
||||||
.msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[duplicate->first]),
|
|
||||||
.errPos = data.state.positions[duplicate->second]
|
|
||||||
});
|
|
||||||
|
|
||||||
Formals result;
|
|
||||||
result.ellipsis = formals->ellipsis;
|
|
||||||
result.formals = std::move(formals->formals);
|
|
||||||
|
|
||||||
if (arg && result.has(arg))
|
|
||||||
throw ParseError({
|
|
||||||
.msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]),
|
|
||||||
.errPos = data.state.positions[pos]
|
|
||||||
});
|
|
||||||
|
|
||||||
delete formals;
|
|
||||||
return new Formals(std::move(result));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
|
|
||||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
|
|
||||||
{
|
|
||||||
if (es.empty()) return new ExprString("");
|
|
||||||
|
|
||||||
/* Figure out the minimum indentation. Note that by design
|
|
||||||
whitespace-only final lines are not taken into account. (So
|
|
||||||
the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */
|
|
||||||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
|
||||||
size_t minIndent = 1000000;
|
|
||||||
size_t curIndent = 0;
|
|
||||||
for (auto & [i_pos, i] : es) {
|
|
||||||
auto * str = std::get_if<StringToken>(&i);
|
|
||||||
if (!str || !str->hasIndentation) {
|
|
||||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
|
||||||
if (atStartOfLine) {
|
|
||||||
atStartOfLine = false;
|
|
||||||
if (curIndent < minIndent) minIndent = curIndent;
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
for (size_t j = 0; j < str->l; ++j) {
|
|
||||||
if (atStartOfLine) {
|
|
||||||
if (str->p[j] == ' ')
|
|
||||||
curIndent++;
|
|
||||||
else if (str->p[j] == '\n') {
|
|
||||||
/* Empty line, doesn't influence minimum
|
|
||||||
indentation. */
|
|
||||||
curIndent = 0;
|
|
||||||
} else {
|
|
||||||
atStartOfLine = false;
|
|
||||||
if (curIndent < minIndent) minIndent = curIndent;
|
|
||||||
}
|
|
||||||
} else if (str->p[j] == '\n') {
|
|
||||||
atStartOfLine = true;
|
|
||||||
curIndent = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Strip spaces from each line. */
|
|
||||||
auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>;
|
|
||||||
atStartOfLine = true;
|
|
||||||
size_t curDropped = 0;
|
|
||||||
size_t n = es.size();
|
|
||||||
auto i = es.begin();
|
|
||||||
const auto trimExpr = [&] (Expr * e) {
|
|
||||||
atStartOfLine = false;
|
|
||||||
curDropped = 0;
|
|
||||||
es2->emplace_back(i->first, e);
|
|
||||||
};
|
|
||||||
const auto trimString = [&] (const StringToken & t) {
|
|
||||||
std::string s2;
|
|
||||||
for (size_t j = 0; j < t.l; ++j) {
|
|
||||||
if (atStartOfLine) {
|
|
||||||
if (t.p[j] == ' ') {
|
|
||||||
if (curDropped++ >= minIndent)
|
|
||||||
s2 += t.p[j];
|
|
||||||
}
|
|
||||||
else if (t.p[j] == '\n') {
|
|
||||||
curDropped = 0;
|
|
||||||
s2 += t.p[j];
|
|
||||||
} else {
|
|
||||||
atStartOfLine = false;
|
|
||||||
curDropped = 0;
|
|
||||||
s2 += t.p[j];
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
s2 += t.p[j];
|
|
||||||
if (t.p[j] == '\n') atStartOfLine = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Remove the last line if it is empty and consists only of
|
|
||||||
spaces. */
|
|
||||||
if (n == 1) {
|
|
||||||
std::string::size_type p = s2.find_last_of('\n');
|
|
||||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
|
||||||
s2 = std::string(s2, 0, p + 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
es2->emplace_back(i->first, new ExprString(std::move(s2)));
|
|
||||||
};
|
|
||||||
for (; i != es.end(); ++i, --n) {
|
|
||||||
std::visit(overloaded { trimExpr, trimString }, i->second);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* If this is a single string, then don't do a concatenation. */
|
|
||||||
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
|
|
||||||
auto *const result = (*es2)[0].second;
|
|
||||||
delete es2;
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
return new ExprConcatStrings(pos, true, es2);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
|
|
||||||
{
|
|
||||||
return data->state.positions.add(data->origin, loc.first_line, loc.first_column);
|
|
||||||
}
|
|
||||||
|
|
||||||
#define CUR_POS makeCurPos(*yylocp, data)
|
|
||||||
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error)
|
|
||||||
{
|
|
||||||
data->error = {
|
|
||||||
.msg = hintfmt(error),
|
|
||||||
.errPos = data->state.positions[makeCurPos(*loc, data)]
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
%}
|
%}
|
||||||
|
|
||||||
|
@ -339,17 +78,17 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
||||||
nix::Expr * e;
|
nix::Expr * e;
|
||||||
nix::ExprList * list;
|
nix::ExprList * list;
|
||||||
nix::ExprAttrs * attrs;
|
nix::ExprAttrs * attrs;
|
||||||
nix::ParserFormals * formals;
|
nix::Formals * formals;
|
||||||
nix::Formal * formal;
|
nix::Formal * formal;
|
||||||
nix::NixInt n;
|
nix::NixInt n;
|
||||||
nix::NixFloat nf;
|
nix::NixFloat nf;
|
||||||
StringToken id; // !!! -> Symbol
|
nix::StringToken id; // !!! -> Symbol
|
||||||
StringToken path;
|
nix::StringToken path;
|
||||||
StringToken uri;
|
nix::StringToken uri;
|
||||||
StringToken str;
|
nix::StringToken str;
|
||||||
std::vector<nix::AttrName> * attrNames;
|
std::vector<nix::AttrName> * attrNames;
|
||||||
std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
|
std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
|
||||||
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, StringToken>>> * ind_string_parts;
|
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts;
|
||||||
}
|
}
|
||||||
|
|
||||||
%type <e> start expr expr_function expr_if expr_op
|
%type <e> start expr expr_function expr_if expr_op
|
||||||
|
@ -389,24 +128,24 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
||||||
|
|
||||||
%%
|
%%
|
||||||
|
|
||||||
start: expr { data->result = $1; };
|
start: expr { state->result = $1; };
|
||||||
|
|
||||||
expr: expr_function;
|
expr: expr_function;
|
||||||
|
|
||||||
expr_function
|
expr_function
|
||||||
: ID ':' expr_function
|
: ID ':' expr_function
|
||||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
|
{ $$ = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3); }
|
||||||
| '{' formals '}' ':' expr_function
|
| '{' formals '}' ':' expr_function
|
||||||
{ $$ = new ExprLambda(CUR_POS, toFormals(*data, $2), $5); }
|
{ $$ = new ExprLambda(CUR_POS, state->validateFormals($2), $5); }
|
||||||
| '{' formals '}' '@' ID ':' expr_function
|
| '{' formals '}' '@' ID ':' expr_function
|
||||||
{
|
{
|
||||||
auto arg = data->symbols.create($5);
|
auto arg = state->symbols.create($5);
|
||||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7);
|
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7);
|
||||||
}
|
}
|
||||||
| ID '@' '{' formals '}' ':' expr_function
|
| ID '@' '{' formals '}' ':' expr_function
|
||||||
{
|
{
|
||||||
auto arg = data->symbols.create($1);
|
auto arg = state->symbols.create($1);
|
||||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7);
|
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7);
|
||||||
}
|
}
|
||||||
| ASSERT expr ';' expr_function
|
| ASSERT expr ';' expr_function
|
||||||
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
||||||
|
@ -415,8 +154,8 @@ expr_function
|
||||||
| LET binds IN_KW expr_function
|
| LET binds IN_KW expr_function
|
||||||
{ if (!$2->dynamicAttrs.empty())
|
{ if (!$2->dynamicAttrs.empty())
|
||||||
throw ParseError({
|
throw ParseError({
|
||||||
.msg = hintfmt("dynamic attributes not allowed in let"),
|
.msg = HintFmt("dynamic attributes not allowed in let"),
|
||||||
.errPos = data->state.positions[CUR_POS]
|
.pos = state->positions[CUR_POS]
|
||||||
});
|
});
|
||||||
$$ = new ExprLet($2, $4);
|
$$ = new ExprLet($2, $4);
|
||||||
}
|
}
|
||||||
|
@ -430,24 +169,24 @@ expr_if
|
||||||
|
|
||||||
expr_op
|
expr_op
|
||||||
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
|
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
|
||||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
|
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->s.sub), {new ExprInt(0), $2}); }
|
||||||
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
|
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
|
||||||
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
|
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
|
||||||
| expr_op '<' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
|
| expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3}); }
|
||||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
|
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1})); }
|
||||||
| expr_op '>' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
|
| expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1}); }
|
||||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
|
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3})); }
|
||||||
| expr_op AND expr_op { $$ = new ExprOpAnd(makeCurPos(@2, data), $1, $3); }
|
| expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); }
|
||||||
| expr_op OR expr_op { $$ = new ExprOpOr(makeCurPos(@2, data), $1, $3); }
|
| expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); }
|
||||||
| expr_op IMPL expr_op { $$ = new ExprOpImpl(makeCurPos(@2, data), $1, $3); }
|
| expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); }
|
||||||
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(makeCurPos(@2, data), $1, $3); }
|
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->at(@2), $1, $3); }
|
||||||
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; }
|
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; }
|
||||||
| expr_op '+' expr_op
|
| expr_op '+' expr_op
|
||||||
{ $$ = new ExprConcatStrings(makeCurPos(@2, data), false, new std::vector<std::pair<PosIdx, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
|
{ $$ = new ExprConcatStrings(state->at(@2), false, new std::vector<std::pair<PosIdx, Expr *> >({{state->at(@1), $1}, {state->at(@3), $3}})); }
|
||||||
| expr_op '-' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
|
| expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.sub), {$1, $3}); }
|
||||||
| expr_op '*' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
|
| expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.mul), {$1, $3}); }
|
||||||
| expr_op '/' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__div")), {$1, $3}); }
|
| expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.div), {$1, $3}); }
|
||||||
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(makeCurPos(@2, data), $1, $3); }
|
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); }
|
||||||
| expr_app
|
| expr_app
|
||||||
;
|
;
|
||||||
|
|
||||||
|
@ -470,7 +209,7 @@ expr_select
|
||||||
| /* Backwards compatibility: because Nixpkgs has a rarely used
|
| /* Backwards compatibility: because Nixpkgs has a rarely used
|
||||||
function named ‘or’, allow stuff like ‘map or [...]’. */
|
function named ‘or’, allow stuff like ‘map or [...]’. */
|
||||||
expr_simple OR_KW
|
expr_simple OR_KW
|
||||||
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); }
|
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}); }
|
||||||
| expr_simple
|
| expr_simple
|
||||||
;
|
;
|
||||||
|
|
||||||
|
@ -480,33 +219,33 @@ expr_simple
|
||||||
if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0)
|
if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0)
|
||||||
$$ = new ExprPos(CUR_POS);
|
$$ = new ExprPos(CUR_POS);
|
||||||
else
|
else
|
||||||
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
|
$$ = new ExprVar(CUR_POS, state->symbols.create($1));
|
||||||
}
|
}
|
||||||
| INT_LIT { $$ = new ExprInt($1); }
|
| INT_LIT { $$ = new ExprInt($1); }
|
||||||
| FLOAT_LIT { $$ = new ExprFloat($1); }
|
| FLOAT_LIT { $$ = new ExprFloat($1); }
|
||||||
| '"' string_parts '"' { $$ = $2; }
|
| '"' string_parts '"' { $$ = $2; }
|
||||||
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
|
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
|
||||||
$$ = stripIndentation(CUR_POS, data->symbols, std::move(*$2));
|
$$ = state->stripIndentation(CUR_POS, std::move(*$2));
|
||||||
delete $2;
|
delete $2;
|
||||||
}
|
}
|
||||||
| path_start PATH_END
|
| path_start PATH_END
|
||||||
| path_start string_parts_interpolated PATH_END {
|
| path_start string_parts_interpolated PATH_END {
|
||||||
$2->insert($2->begin(), {makeCurPos(@1, data), $1});
|
$2->insert($2->begin(), {state->at(@1), $1});
|
||||||
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
||||||
}
|
}
|
||||||
| SPATH {
|
| SPATH {
|
||||||
std::string path($1.p + 1, $1.l - 2);
|
std::string path($1.p + 1, $1.l - 2);
|
||||||
$$ = new ExprCall(CUR_POS,
|
$$ = new ExprCall(CUR_POS,
|
||||||
new ExprVar(data->symbols.create("__findFile")),
|
new ExprVar(state->s.findFile),
|
||||||
{new ExprVar(data->symbols.create("__nixPath")),
|
{new ExprVar(state->s.nixPath),
|
||||||
new ExprString(std::move(path))});
|
new ExprString(std::move(path))});
|
||||||
}
|
}
|
||||||
| URI {
|
| URI {
|
||||||
static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals);
|
static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals);
|
||||||
if (noURLLiterals)
|
if (noURLLiterals)
|
||||||
throw ParseError({
|
throw ParseError({
|
||||||
.msg = hintfmt("URL literals are disabled"),
|
.msg = HintFmt("URL literals are disabled"),
|
||||||
.errPos = data->state.positions[CUR_POS]
|
.pos = state->positions[CUR_POS]
|
||||||
});
|
});
|
||||||
$$ = new ExprString(std::string($1));
|
$$ = new ExprString(std::string($1));
|
||||||
}
|
}
|
||||||
|
@ -514,7 +253,7 @@ expr_simple
|
||||||
/* Let expressions `let {..., body = ...}' are just desugared
|
/* Let expressions `let {..., body = ...}' are just desugared
|
||||||
into `(rec {..., body = ...}).body'. */
|
into `(rec {..., body = ...}).body'. */
|
||||||
| LET '{' binds '}'
|
| LET '{' binds '}'
|
||||||
{ $3->recursive = true; $$ = new ExprSelect(noPos, $3, data->symbols.create("body")); }
|
{ $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->s.body); }
|
||||||
| REC '{' binds '}'
|
| REC '{' binds '}'
|
||||||
{ $3->recursive = true; $$ = $3; }
|
{ $3->recursive = true; $$ = $3; }
|
||||||
| '{' binds '}'
|
| '{' binds '}'
|
||||||
|
@ -530,23 +269,23 @@ string_parts
|
||||||
|
|
||||||
string_parts_interpolated
|
string_parts_interpolated
|
||||||
: string_parts_interpolated STR
|
: string_parts_interpolated STR
|
||||||
{ $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); }
|
{ $$ = $1; $1->emplace_back(state->at(@2), new ExprString(std::string($2))); }
|
||||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); }
|
||||||
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(makeCurPos(@1, data), $2); }
|
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(state->at(@1), $2); }
|
||||||
| STR DOLLAR_CURLY expr '}' {
|
| STR DOLLAR_CURLY expr '}' {
|
||||||
$$ = new std::vector<std::pair<PosIdx, Expr *>>;
|
$$ = new std::vector<std::pair<PosIdx, Expr *>>;
|
||||||
$$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1)));
|
$$->emplace_back(state->at(@1), new ExprString(std::string($1)));
|
||||||
$$->emplace_back(makeCurPos(@2, data), $3);
|
$$->emplace_back(state->at(@2), $3);
|
||||||
}
|
}
|
||||||
;
|
;
|
||||||
|
|
||||||
path_start
|
path_start
|
||||||
: PATH {
|
: PATH {
|
||||||
Path path(absPath({$1.p, $1.l}, data->basePath.path.abs()));
|
Path path(absPath({$1.p, $1.l}, state->basePath.path.abs()));
|
||||||
/* add back in the trailing '/' to the first segment */
|
/* add back in the trailing '/' to the first segment */
|
||||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||||
path += "/";
|
path += "/";
|
||||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
$$ = new ExprPath(ref<InputAccessor>(state->rootFS), std::move(path));
|
||||||
}
|
}
|
||||||
| HPATH {
|
| HPATH {
|
||||||
if (evalSettings.pureEval) {
|
if (evalSettings.pureEval) {
|
||||||
|
@ -556,24 +295,24 @@ path_start
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
$$ = new ExprPath(ref<InputAccessor>(state->rootFS), std::move(path));
|
||||||
}
|
}
|
||||||
;
|
;
|
||||||
|
|
||||||
ind_string_parts
|
ind_string_parts
|
||||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->at(@2), $2); }
|
||||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); }
|
||||||
| { $$ = new std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>>; }
|
| { $$ = new std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>>; }
|
||||||
;
|
;
|
||||||
|
|
||||||
binds
|
binds
|
||||||
: binds attrpath '=' expr ';' { $$ = $1; addAttr($$, std::move(*$2), $4, makeCurPos(@2, data), data->state); delete $2; }
|
: binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
|
||||||
| binds INHERIT attrs ';'
|
| binds INHERIT attrs ';'
|
||||||
{ $$ = $1;
|
{ $$ = $1;
|
||||||
for (auto & i : *$3) {
|
for (auto & i : *$3) {
|
||||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||||
dupAttr(data->state, i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos);
|
state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos);
|
||||||
auto pos = makeCurPos(@3, data);
|
auto pos = state->at(@3);
|
||||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
|
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
|
||||||
}
|
}
|
||||||
delete $3;
|
delete $3;
|
||||||
|
@ -583,48 +322,48 @@ binds
|
||||||
/* !!! Should ensure sharing of the expression in $4. */
|
/* !!! Should ensure sharing of the expression in $4. */
|
||||||
for (auto & i : *$6) {
|
for (auto & i : *$6) {
|
||||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||||
dupAttr(data->state, i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos);
|
state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
|
||||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data)));
|
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6)));
|
||||||
}
|
}
|
||||||
delete $6;
|
delete $6;
|
||||||
}
|
}
|
||||||
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
|
| { $$ = new ExprAttrs(state->at(@0)); }
|
||||||
;
|
;
|
||||||
|
|
||||||
attrs
|
attrs
|
||||||
: attrs attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($2))); }
|
: attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); }
|
||||||
| attrs string_attr
|
| attrs string_attr
|
||||||
{ $$ = $1;
|
{ $$ = $1;
|
||||||
ExprString * str = dynamic_cast<ExprString *>($2);
|
ExprString * str = dynamic_cast<ExprString *>($2);
|
||||||
if (str) {
|
if (str) {
|
||||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||||
delete str;
|
delete str;
|
||||||
} else
|
} else
|
||||||
throw ParseError({
|
throw ParseError({
|
||||||
.msg = hintfmt("dynamic attributes not allowed in inherit"),
|
.msg = HintFmt("dynamic attributes not allowed in inherit"),
|
||||||
.errPos = data->state.positions[makeCurPos(@2, data)]
|
.pos = state->positions[state->at(@2)]
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
| { $$ = new AttrPath; }
|
| { $$ = new AttrPath; }
|
||||||
;
|
;
|
||||||
|
|
||||||
attrpath
|
attrpath
|
||||||
: attrpath '.' attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($3))); }
|
: attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); }
|
||||||
| attrpath '.' string_attr
|
| attrpath '.' string_attr
|
||||||
{ $$ = $1;
|
{ $$ = $1;
|
||||||
ExprString * str = dynamic_cast<ExprString *>($3);
|
ExprString * str = dynamic_cast<ExprString *>($3);
|
||||||
if (str) {
|
if (str) {
|
||||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||||
delete str;
|
delete str;
|
||||||
} else
|
} else
|
||||||
$$->push_back(AttrName($3));
|
$$->push_back(AttrName($3));
|
||||||
}
|
}
|
||||||
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(data->symbols.create($1))); }
|
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(state->symbols.create($1))); }
|
||||||
| string_attr
|
| string_attr
|
||||||
{ $$ = new std::vector<AttrName>;
|
{ $$ = new std::vector<AttrName>;
|
||||||
ExprString *str = dynamic_cast<ExprString *>($1);
|
ExprString *str = dynamic_cast<ExprString *>($1);
|
||||||
if (str) {
|
if (str) {
|
||||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||||
delete str;
|
delete str;
|
||||||
} else
|
} else
|
||||||
$$->push_back(AttrName($1));
|
$$->push_back(AttrName($1));
|
||||||
|
@ -650,226 +389,52 @@ formals
|
||||||
: formal ',' formals
|
: formal ',' formals
|
||||||
{ $$ = $3; $$->formals.emplace_back(*$1); delete $1; }
|
{ $$ = $3; $$->formals.emplace_back(*$1); delete $1; }
|
||||||
| formal
|
| formal
|
||||||
{ $$ = new ParserFormals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; }
|
{ $$ = new Formals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; }
|
||||||
|
|
|
|
||||||
{ $$ = new ParserFormals; $$->ellipsis = false; }
|
{ $$ = new Formals; $$->ellipsis = false; }
|
||||||
| ELLIPSIS
|
| ELLIPSIS
|
||||||
{ $$ = new ParserFormals; $$->ellipsis = true; }
|
{ $$ = new Formals; $$->ellipsis = true; }
|
||||||
;
|
;
|
||||||
|
|
||||||
formal
|
formal
|
||||||
: ID { $$ = new Formal{CUR_POS, data->symbols.create($1), 0}; }
|
: ID { $$ = new Formal{CUR_POS, state->symbols.create($1), 0}; }
|
||||||
| ID '?' expr { $$ = new Formal{CUR_POS, data->symbols.create($1), $3}; }
|
| ID '?' expr { $$ = new Formal{CUR_POS, state->symbols.create($1), $3}; }
|
||||||
;
|
;
|
||||||
|
|
||||||
%%
|
%%
|
||||||
|
|
||||||
|
|
||||||
#include <sys/types.h>
|
|
||||||
#include <sys/stat.h>
|
|
||||||
#include <fcntl.h>
|
|
||||||
#include <unistd.h>
|
|
||||||
|
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
#include "filetransfer.hh"
|
|
||||||
#include "tarball.hh"
|
|
||||||
#include "store-api.hh"
|
|
||||||
#include "flake/flake.hh"
|
|
||||||
#include "fs-input-accessor.hh"
|
|
||||||
#include "memory-input-accessor.hh"
|
|
||||||
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
unsigned long Expr::nrExprs = 0;
|
Expr * parseExprFromBuf(
|
||||||
|
|
||||||
Expr * EvalState::parse(
|
|
||||||
char * text,
|
char * text,
|
||||||
size_t length,
|
size_t length,
|
||||||
Pos::Origin origin,
|
Pos::Origin origin,
|
||||||
const SourcePath & basePath,
|
const SourcePath & basePath,
|
||||||
std::shared_ptr<StaticEnv> & staticEnv)
|
SymbolTable & symbols,
|
||||||
|
PosTable & positions,
|
||||||
|
const ref<InputAccessor> rootFS,
|
||||||
|
const Expr::AstSymbols & astSymbols)
|
||||||
{
|
{
|
||||||
yyscan_t scanner;
|
yyscan_t scanner;
|
||||||
ParseData data {
|
ParserState state {
|
||||||
.state = *this,
|
|
||||||
.symbols = symbols,
|
.symbols = symbols,
|
||||||
|
.positions = positions,
|
||||||
.basePath = basePath,
|
.basePath = basePath,
|
||||||
.origin = {origin},
|
.origin = {origin},
|
||||||
|
.rootFS = rootFS,
|
||||||
|
.s = astSymbols,
|
||||||
};
|
};
|
||||||
|
|
||||||
yylex_init(&scanner);
|
yylex_init(&scanner);
|
||||||
|
Finally _destroy([&] { yylex_destroy(scanner); });
|
||||||
|
|
||||||
yy_scan_buffer(text, length, scanner);
|
yy_scan_buffer(text, length, scanner);
|
||||||
int res = yyparse(scanner, &data);
|
yyparse(scanner, &state);
|
||||||
yylex_destroy(scanner);
|
|
||||||
|
|
||||||
if (res) throw ParseError(data.error.value());
|
return state.result;
|
||||||
|
|
||||||
data.result->bindVars(*this, staticEnv);
|
|
||||||
|
|
||||||
return data.result;
|
|
||||||
}
|
|
||||||
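
Two things change in the driver above: parsing is no longer an `EvalState` method (it now receives the symbol and position tables plus the root accessor explicitly), and `yylex_destroy` moves from a plain call after `yyparse` into a `Finally` guard, so the scanner is released even when a `ParseError` propagates out of the parse. A self-contained illustration of that scope-exit idiom (the resource here is only a stand-in, not the real scanner):

```cpp
#include <cstdio>
#include <functional>
#include <stdexcept>

// Simplified stand-in for nix's Finally helper: run a callback on scope exit.
struct Finally
{
    std::function<void()> fn;
    ~Finally() { fn(); }
};

int main()
{
    try {
        std::FILE * f = std::fopen("/dev/null", "r");
        Finally close{[&] { if (f) std::fclose(f); }}; // runs even if we throw below
        throw std::runtime_error("simulated ParseError");
    } catch (const std::exception & e) {
        std::puts(e.what()); // by the time we get here the file is already closed
    }
}
```
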
|
|
||||||
|
|
||||||
SourcePath resolveExprPath(SourcePath path)
|
|
||||||
{
|
|
||||||
unsigned int followCount = 0, maxFollow = 1024;
|
|
||||||
|
|
||||||
/* If `path' is a symlink, follow it. This is so that relative
|
|
||||||
path references work. */
|
|
||||||
while (!path.path.isRoot()) {
|
|
||||||
// Basic cycle/depth limit to avoid infinite loops.
|
|
||||||
if (++followCount >= maxFollow)
|
|
||||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
|
||||||
auto p = path.parent().resolveSymlinks() + path.baseName();
|
|
||||||
if (p.lstat().type != InputAccessor::tSymlink) break;
|
|
||||||
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
|
|
||||||
}
|
|
||||||
|
|
||||||
/* If `path' refers to a directory, append `/default.nix'. */
|
|
||||||
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
|
|
||||||
return path + "default.nix";
|
|
||||||
|
|
||||||
return path;
|
|
||||||
}
|
|
||||||
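
`resolveExprPath` (removed here; it lives on elsewhere after this commit) follows symlinks one level at a time with a hard cap of 1024 hops and finally appends `default.nix` when the resolved path is a directory. Roughly the same shape with `std::filesystem`, as a sketch only — the real code operates on `SourcePath`/`InputAccessor`, not the host filesystem:

```cpp
#include <filesystem>
#include <stdexcept>

namespace fs = std::filesystem;

// Rough sketch of resolveExprPath: bounded symlink chasing, then default.nix.
fs::path resolveExprPathSketch(fs::path path)
{
    for (unsigned hops = 0; fs::is_symlink(path); ++hops) {
        if (hops >= 1024)
            throw std::runtime_error(
                "too many symbolic links encountered while traversing '" + path.string() + "'");
        path = path.parent_path() / fs::read_symlink(path); // relative targets resolve against the parent
    }
    if (fs::is_directory(path))
        path /= "default.nix";
    return path;
}
```
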
|
|
||||||
|
|
||||||
Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
|
||||||
{
|
|
||||||
return parseExprFromFile(path, staticBaseEnv);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
|
||||||
{
|
|
||||||
auto buffer = path.resolveSymlinks().readFile();
|
|
||||||
// readFile should have left some extra space for terminators
|
|
||||||
buffer.append("\0\0", 2);
|
|
||||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
|
||||||
{
|
|
||||||
auto s = make_ref<std::string>(std::move(s_));
|
|
||||||
s->append("\0\0", 2);
|
|
||||||
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
|
|
||||||
{
|
|
||||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Expr * EvalState::parseStdin()
|
|
||||||
{
|
|
||||||
//Activity act(*logger, lvlTalkative, "parsing standard input");
|
|
||||||
auto buffer = drainFD(0);
|
|
||||||
// drainFD should have left some extra space for terminators
|
|
||||||
buffer.append("\0\0", 2);
|
|
||||||
auto s = make_ref<std::string>(std::move(buffer));
|
|
||||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
SourcePath EvalState::findFile(const std::string_view path)
|
|
||||||
{
|
|
||||||
return findFile(searchPath, path);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
|
||||||
{
|
|
||||||
for (auto & i : searchPath.elements) {
|
|
||||||
auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
|
|
||||||
|
|
||||||
if (!suffixOpt) continue;
|
|
||||||
auto suffix = *suffixOpt;
|
|
||||||
|
|
||||||
auto rOpt = resolveSearchPathPath(i.path);
|
|
||||||
if (!rOpt) continue;
|
|
||||||
auto r = *rOpt;
|
|
||||||
|
|
||||||
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
|
|
||||||
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasPrefix(path, "nix/"))
|
|
||||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
|
||||||
|
|
||||||
debugThrow(ThrownError({
|
|
||||||
.msg = hintfmt(evalSettings.pureEval
|
|
||||||
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
|
|
||||||
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
|
|
||||||
path),
|
|
||||||
.errPos = positions[pos]
|
|
||||||
}), 0, 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
|
|
||||||
{
|
|
||||||
auto & value = value0.s;
|
|
||||||
auto i = searchPathResolved.find(value);
|
|
||||||
if (i != searchPathResolved.end()) return i->second;
|
|
||||||
|
|
||||||
std::optional<std::string> res;
|
|
||||||
|
|
||||||
if (EvalSettings::isPseudoUrl(value)) {
|
|
||||||
try {
|
|
||||||
auto storePath = fetchers::downloadTarball(
|
|
||||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
|
|
||||||
res = { store->toRealPath(storePath) };
|
|
||||||
} catch (FileTransferError & e) {
|
|
||||||
logWarning({
|
|
||||||
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
else if (hasPrefix(value, "flake:")) {
|
|
||||||
experimentalFeatureSettings.require(Xp::Flakes);
|
|
||||||
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
|
|
||||||
debug("fetching flake search path element '%s''", value);
|
|
||||||
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
|
|
||||||
res = { store->toRealPath(storePath) };
|
|
||||||
}
|
|
||||||
|
|
||||||
else {
|
|
||||||
auto path = absPath(value);
|
|
||||||
|
|
||||||
/* Allow access to paths in the search path. */
|
|
||||||
if (initAccessControl) {
|
|
||||||
allowPath(path);
|
|
||||||
if (store->isInStore(path)) {
|
|
||||||
try {
|
|
||||||
StorePathSet closure;
|
|
||||||
store->computeFSClosure(store->toStorePath(path).first, closure);
|
|
||||||
for (auto & p : closure)
|
|
||||||
allowPath(p);
|
|
||||||
} catch (InvalidPath &) { }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pathExists(path))
|
|
||||||
res = { path };
|
|
||||||
else {
|
|
||||||
logWarning({
|
|
||||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value)
|
|
||||||
});
|
|
||||||
res = std::nullopt;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (res)
|
|
||||||
debug("resolved search path element '%s' to '%s'", value, *res);
|
|
||||||
else
|
|
||||||
debug("failed to resolve search path element '%s'", value);
|
|
||||||
|
|
||||||
searchPathResolved.emplace(value, res);
|
|
||||||
return res;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
#include "eval.hh"
|
#include "eval.hh"
|
||||||
#include "fs-input-accessor.hh"
|
|
||||||
|
|
||||||
namespace nix {
|
namespace nix {
|
||||||
|
|
||||||
|
@ -8,4 +7,9 @@ SourcePath EvalState::rootPath(CanonPath path)
|
||||||
return {rootFS, std::move(path)};
|
return {rootFS, std::move(path)};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
SourcePath EvalState::rootPath(PathView path)
|
||||||
|
{
|
||||||
|
return {rootFS, CanonPath(absPath(path))};
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
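
The added `rootPath(PathView)` overload absolutises an arbitrary path string before wrapping it in a `CanonPath` over `rootFS`, so callers can pass relative paths directly. A stand-alone miniature of that absolutise-then-normalise step using the standard library (only an approximation of what `absPath`/`CanonPath` do):

```cpp
#include <filesystem>
#include <iostream>

int main()
{
    namespace fs = std::filesystem;
    // Make the path absolute against the current directory, then clean it up
    // lexically -- roughly the CanonPath(absPath(path)) combination above.
    fs::path p = "./src/../src/libexpr";
    std::cout << fs::absolute(p).lexically_normal() << '\n';
}
```
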
48
src/libexpr/pos-idx.hh
Normal file
48
src/libexpr/pos-idx.hh
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <cinttypes>
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
class PosIdx
|
||||||
|
{
|
||||||
|
friend class PosTable;
|
||||||
|
|
||||||
|
private:
|
||||||
|
uint32_t id;
|
||||||
|
|
||||||
|
explicit PosIdx(uint32_t id)
|
||||||
|
: id(id)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
public:
|
||||||
|
PosIdx()
|
||||||
|
: id(0)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
explicit operator bool() const
|
||||||
|
{
|
||||||
|
return id > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator<(const PosIdx other) const
|
||||||
|
{
|
||||||
|
return id < other.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator==(const PosIdx other) const
|
||||||
|
{
|
||||||
|
return id == other.id;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool operator!=(const PosIdx other) const
|
||||||
|
{
|
||||||
|
return id != other.id;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
inline PosIdx noPos = {};
|
||||||
|
|
||||||
|
}
|
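
`PosIdx` is deliberately tiny: a 1-based 32-bit handle where 0 is reserved for `noPos`, which is why `operator bool` tests `id > 0` and why the position table below hands out `idx + 1`. A self-contained miniature of that contract, with the real `PosTable` replaced by a plain vector:

```cpp
#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

// Miniature of the PosIdx/PosTable contract: id 0 means "no position",
// real entries live at id - 1.
struct MiniPosIdx
{
    uint32_t id = 0;
    explicit operator bool() const { return id > 0; }
};

struct MiniPosTable
{
    std::vector<std::pair<uint32_t, uint32_t>> offsets; // line, column

    MiniPosIdx add(uint32_t line, uint32_t column)
    {
        offsets.push_back({line, column});
        return MiniPosIdx{static_cast<uint32_t>(offsets.size())}; // idx + 1
    }

    std::pair<uint32_t, uint32_t> operator[](MiniPosIdx p) const
    {
        assert(p); // callers check for noPos before dereferencing
        return offsets[p.id - 1];
    }
};

int main()
{
    MiniPosTable t;
    MiniPosIdx none;              // default-constructed, i.e. noPos
    auto p = t.add(42, 7);
    assert(!none && p && t[p].first == 42);
}
```
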
83
src/libexpr/pos-table.hh
Normal file
83
src/libexpr/pos-table.hh
Normal file
|
@ -0,0 +1,83 @@
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <cinttypes>
|
||||||
|
#include <numeric>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
#include "chunked-vector.hh"
|
||||||
|
#include "pos-idx.hh"
|
||||||
|
#include "position.hh"
|
||||||
|
|
||||||
|
namespace nix {
|
||||||
|
|
||||||
|
class PosTable
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
class Origin
|
||||||
|
{
|
||||||
|
friend PosTable;
|
||||||
|
private:
|
||||||
|
// must always be invalid by default, add() replaces this with the actual value.
|
||||||
|
// subsequent add() calls use this index as a token to quickly check whether the
|
||||||
|
// current origins.back() can be reused or not.
|
||||||
|
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
|
||||||
|
|
||||||
|
// Used for searching in PosTable::[].
|
||||||
|
explicit Origin(uint32_t idx)
|
||||||
|
: idx(idx)
|
||||||
|
, origin{std::monostate()}
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
|
public:
|
||||||
|
const Pos::Origin origin;
|
||||||
|
|
||||||
|
Origin(Pos::Origin origin)
|
||||||
|
: origin(origin)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Offset
|
||||||
|
{
|
||||||
|
uint32_t line, column;
|
||||||
|
};
|
||||||
|
|
||||||
|
private:
|
||||||
|
std::vector<Origin> origins;
|
||||||
|
ChunkedVector<Offset, 8192> offsets;
|
||||||
|
|
||||||
|
public:
|
||||||
|
PosTable()
|
||||||
|
: offsets(1024)
|
||||||
|
{
|
||||||
|
origins.reserve(1024);
|
||||||
|
}
|
||||||
|
|
||||||
|
PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
|
||||||
|
{
|
||||||
|
const auto idx = offsets.add({line, column}).second;
|
||||||
|
if (origins.empty() || origins.back().idx != origin.idx) {
|
||||||
|
origin.idx = idx;
|
||||||
|
origins.push_back(origin);
|
||||||
|
}
|
||||||
|
return PosIdx(idx + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
Pos operator[](PosIdx p) const
|
||||||
|
{
|
||||||
|
if (p.id == 0 || p.id > offsets.size())
|
||||||
|
return {};
|
||||||
|
const auto idx = p.id - 1;
|
||||||
|
/* we want the last key <= idx, so we'll take prev(first key > idx).
|
||||||
|
this is guaranteed to never rewind origin.begin because the first
|
||||||
|
key is always 0. */
|
||||||
|
const auto pastOrigin = std::upper_bound(
|
||||||
|
origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; });
|
||||||
|
const auto origin = *std::prev(pastOrigin);
|
||||||
|
const auto offset = offsets[idx];
|
||||||
|
return {offset.line, offset.column, origin.origin};
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
}
|
|
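
The interesting part of `PosTable::operator[]` is the origin search: origins are appended with strictly increasing starting offsets, so the origin that owns offset `idx` is the last one whose start is <= `idx`, found as `prev(upper_bound(idx))`; the comment about never rewinding past `begin()` holds because the first origin always starts at 0. The same search in isolation, with plain integers standing in for origin start offsets:

```cpp
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Find the origin owning `idx`: the last start <= idx, i.e. prev(first start > idx).
std::size_t owningOrigin(const std::vector<uint32_t> & starts, uint32_t idx)
{
    auto past = std::upper_bound(starts.begin(), starts.end(), idx);
    return static_cast<std::size_t>(std::distance(starts.begin(), std::prev(past)));
}

int main()
{
    std::vector<uint32_t> starts{0, 100, 250}; // three origins begin at these offsets
    assert(owningOrigin(starts, 0) == 0);
    assert(owningOrigin(starts, 99) == 0);
    assert(owningOrigin(starts, 100) == 1);
    assert(owningOrigin(starts, 300) == 2);
}
```
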
@ -39,10 +39,6 @@ namespace nix {
|
||||||
* Miscellaneous
|
* Miscellaneous
|
||||||
*************************************************************/
|
*************************************************************/
|
||||||
|
|
||||||
|
|
||||||
InvalidPathError::InvalidPathError(const Path & path) :
|
|
||||||
EvalError("path '%s' is not valid", path), path(path) {}
|
|
||||||
|
|
||||||
StringMap EvalState::realiseContext(const NixStringContext & context)
|
StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||||
{
|
{
|
||||||
std::vector<DerivedPath::Built> drvs;
|
std::vector<DerivedPath::Built> drvs;
|
||||||
|
@ -51,7 +47,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||||
for (auto & c : context) {
|
for (auto & c : context) {
|
||||||
auto ensureValid = [&](const StorePath & p) {
|
auto ensureValid = [&](const StorePath & p) {
|
||||||
if (!store->isValidPath(p))
|
if (!store->isValidPath(p))
|
||||||
debugThrowLastTrace(InvalidPathError(store->printStorePath(p)));
|
error<InvalidPathError>(store->printStorePath(p)).debugThrow();
|
||||||
};
|
};
|
||||||
std::visit(overloaded {
|
std::visit(overloaded {
|
||||||
[&](const NixStringContextElem::Built & b) {
|
[&](const NixStringContextElem::Built & b) {
|
||||||
|
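
This hunk is the first of many below with the same mechanical shape: the old route of building an `ErrorInfo` by hand (a `hintfmt` message plus `errPos`) and passing it to `debugThrowLastTrace` becomes the templated builder `state.error<SomeError>(fmt, args...)`, optionally decorated with `.atPos(...)` or `.withTrace(...)` and finished off with `.debugThrow()`. A self-contained miniature of why a chainable builder reads better here (names and behaviour are illustrative, not nix's real classes):

```cpp
#include <cstdio>
#include <stdexcept>
#include <string>

// Toy version of the new error-builder style: error<T>(...) returns an object
// that can be decorated before .debugThrow() finally throws T.
template<typename E>
struct ErrorBuilder
{
    std::string msg;
    int line = 0;

    ErrorBuilder & atPos(int l) { line = l; return *this; }

    [[noreturn]] void debugThrow()
    {
        throw E(msg + " at line " + std::to_string(line));
    }
};

template<typename E>
ErrorBuilder<E> error(std::string msg) { return {std::move(msg)}; }

int main()
{
    try {
        error<std::runtime_error>("division by zero").atPos(42).debugThrow();
    } catch (const std::runtime_error & e) {
        std::puts(e.what());
    }
}
```
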
@ -78,9 +74,10 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||||
if (drvs.empty()) return {};
|
if (drvs.empty()) return {};
|
||||||
|
|
||||||
if (!evalSettings.enableImportFromDerivation)
|
if (!evalSettings.enableImportFromDerivation)
|
||||||
debugThrowLastTrace(Error(
|
error<EvalError>(
|
||||||
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
|
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
|
||||||
drvs.begin()->to_string(*store)));
|
drvs.begin()->to_string(*store)
|
||||||
|
).debugThrow();
|
||||||
|
|
||||||
/* Build/substitute the context. */
|
/* Build/substitute the context. */
|
||||||
std::vector<DerivedPath> buildReqs;
|
std::vector<DerivedPath> buildReqs;
|
||||||
|
@ -112,7 +109,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||||
for (auto & outputPath : outputsToCopyAndAllow) {
|
for (auto & outputPath : outputsToCopyAndAllow) {
|
||||||
/* Add the output of this derivation to the allowed
|
/* Add the output of this derivation to the allowed
|
||||||
paths. */
|
paths. */
|
||||||
allowPath(store->toRealPath(outputPath));
|
allowPath(outputPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
return res;
|
return res;
|
||||||
|
@ -340,16 +337,16 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
|
||||||
|
|
||||||
void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL);
|
void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL);
|
||||||
if (!handle)
|
if (!handle)
|
||||||
state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror()));
|
state.error<EvalError>("could not open '%1%': %2%", path, dlerror()).debugThrow();
|
||||||
|
|
||||||
dlerror();
|
dlerror();
|
||||||
ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str());
|
ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str());
|
||||||
if(!func) {
|
if(!func) {
|
||||||
char *message = dlerror();
|
char *message = dlerror();
|
||||||
if (message)
|
if (message)
|
||||||
state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message));
|
state.error<EvalError>("could not load symbol '%1%' from '%2%': %3%", sym, path, message).debugThrow();
|
||||||
else
|
else
|
||||||
state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path));
|
state.error<EvalError>("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path).debugThrow();
|
||||||
}
|
}
|
||||||
|
|
||||||
(func)(state, v);
|
(func)(state, v);
|
||||||
|
@ -365,7 +362,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
auto elems = args[0]->listElems();
|
auto elems = args[0]->listElems();
|
||||||
auto count = args[0]->listSize();
|
auto count = args[0]->listSize();
|
||||||
if (count == 0)
|
if (count == 0)
|
||||||
state.error("at least one argument to 'exec' required").atPos(pos).debugThrow<EvalError>();
|
state.error<EvalError>("at least one argument to 'exec' required").atPos(pos).debugThrow();
|
||||||
NixStringContext context;
|
NixStringContext context;
|
||||||
auto program = state.coerceToString(pos, *elems[0], context,
|
auto program = state.coerceToString(pos, *elems[0], context,
|
||||||
"while evaluating the first element of the argument passed to builtins.exec",
|
"while evaluating the first element of the argument passed to builtins.exec",
|
||||||
|
@ -380,7 +377,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
try {
|
try {
|
||||||
auto _ = state.realiseContext(context); // FIXME: Handle CA derivations
|
auto _ = state.realiseContext(context); // FIXME: Handle CA derivations
|
||||||
} catch (InvalidPathError & e) {
|
} catch (InvalidPathError & e) {
|
||||||
state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow<EvalError>();
|
state.error<EvalError>("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow();
|
||||||
}
|
}
|
||||||
|
|
||||||
auto output = runProgram(program, true, commandArgs);
|
auto output = runProgram(program, true, commandArgs);
|
||||||
|
@ -582,7 +579,7 @@ struct CompareValues
|
||||||
if (v1->type() == nInt && v2->type() == nFloat)
|
if (v1->type() == nInt && v2->type() == nFloat)
|
||||||
return v1->integer < v2->fpoint;
|
return v1->integer < v2->fpoint;
|
||||||
if (v1->type() != v2->type())
|
if (v1->type() != v2->type())
|
||||||
state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow<EvalError>();
|
state.error<EvalError>("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow();
|
||||||
// Allow selecting a subset of enum values
|
// Allow selecting a subset of enum values
|
||||||
#pragma GCC diagnostic push
|
#pragma GCC diagnostic push
|
||||||
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
||||||
|
@ -610,7 +607,7 @@ struct CompareValues
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>();
|
state.error<EvalError>("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow();
|
||||||
#pragma GCC diagnostic pop
|
#pragma GCC diagnostic pop
|
||||||
}
|
}
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
|
@ -637,7 +634,7 @@ static Bindings::iterator getAttr(
|
||||||
{
|
{
|
||||||
Bindings::iterator value = attrSet->find(attrSym);
|
Bindings::iterator value = attrSet->find(attrSym);
|
||||||
if (value == attrSet->end()) {
|
if (value == attrSet->end()) {
|
||||||
state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow<TypeError>();
|
state.error<TypeError>("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow();
|
||||||
}
|
}
|
||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
|
@ -757,8 +754,8 @@ static RegisterPrimOp primop_break({
|
||||||
if (state.debugRepl && !state.debugTraces.empty()) {
|
if (state.debugRepl && !state.debugTraces.empty()) {
|
||||||
auto error = Error(ErrorInfo {
|
auto error = Error(ErrorInfo {
|
||||||
.level = lvlInfo,
|
.level = lvlInfo,
|
||||||
.msg = hintfmt("breakpoint reached"),
|
.msg = HintFmt("breakpoint reached"),
|
||||||
.errPos = state.positions[pos],
|
.pos = state.positions[pos],
|
||||||
});
|
});
|
||||||
|
|
||||||
auto & dt = state.debugTraces.front();
|
auto & dt = state.debugTraces.front();
|
||||||
|
@ -768,8 +765,8 @@ static RegisterPrimOp primop_break({
|
||||||
// If the user elects to quit the repl, throw an exception.
|
// If the user elects to quit the repl, throw an exception.
|
||||||
throw Error(ErrorInfo{
|
throw Error(ErrorInfo{
|
||||||
.level = lvlInfo,
|
.level = lvlInfo,
|
||||||
.msg = hintfmt("quit the debugger"),
|
.msg = HintFmt("quit the debugger"),
|
||||||
.errPos = nullptr,
|
.pos = nullptr,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -790,7 +787,7 @@ static RegisterPrimOp primop_abort({
|
||||||
NixStringContext context;
|
NixStringContext context;
|
||||||
auto s = state.coerceToString(pos, *args[0], context,
|
auto s = state.coerceToString(pos, *args[0], context,
|
||||||
"while evaluating the error message passed to builtins.abort").toOwned();
|
"while evaluating the error message passed to builtins.abort").toOwned();
|
||||||
state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s));
|
state.error<Abort>("evaluation aborted with the following error message: '%1%'", s).debugThrow();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -809,7 +806,7 @@ static RegisterPrimOp primop_throw({
|
||||||
NixStringContext context;
|
NixStringContext context;
|
||||||
auto s = state.coerceToString(pos, *args[0], context,
|
auto s = state.coerceToString(pos, *args[0], context,
|
||||||
"while evaluating the error message passed to builtin.throw").toOwned();
|
"while evaluating the error message passed to builtin.throw").toOwned();
|
||||||
state.debugThrowLastTrace(ThrownError(s));
|
state.error<ThrownError>(s).debugThrow();
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -823,7 +820,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
||||||
auto message = state.coerceToString(pos, *args[0], context,
|
auto message = state.coerceToString(pos, *args[0], context,
|
||||||
"while evaluating the error message passed to builtins.addErrorContext",
|
"while evaluating the error message passed to builtins.addErrorContext",
|
||||||
false, false).toOwned();
|
false, false).toOwned();
|
||||||
e.addTrace(nullptr, hintfmt(message), true);
|
e.addTrace(nullptr, HintFmt(message), true);
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -997,7 +994,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
|
||||||
if (args[0]->type() == nString)
|
if (args[0]->type() == nString)
|
||||||
printError("trace: %1%", args[0]->string_view());
|
printError("trace: %1%", args[0]->string_view());
|
||||||
else
|
else
|
||||||
printError("trace: %1%", printValue(state, *args[0]));
|
printError("trace: %1%", ValuePrinter(state, *args[0]));
|
||||||
state.forceValue(*args[1], pos);
|
state.forceValue(*args[1], pos);
|
||||||
v = *args[1];
|
v = *args[1];
|
||||||
}
|
}
|
||||||
|
@ -1074,7 +1071,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
||||||
* often results from the composition of several functions
|
* often results from the composition of several functions
|
||||||
* (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.)
|
* (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.)
|
||||||
*/
|
*/
|
||||||
e.addTrace(nullptr, hintfmt(
|
e.addTrace(nullptr, HintFmt(
|
||||||
"while evaluating derivation '%s'\n"
|
"while evaluating derivation '%s'\n"
|
||||||
" whose name attribute is located at %s",
|
" whose name attribute is located at %s",
|
||||||
drvName, pos), true);
|
drvName, pos), true);
|
||||||
|
@ -1088,9 +1085,10 @@ drvName, Bindings * attrs, Value & v)
|
||||||
/* Check whether attributes should be passed as a JSON file. */
|
/* Check whether attributes should be passed as a JSON file. */
|
||||||
using nlohmann::json;
|
using nlohmann::json;
|
||||||
std::optional<json> jsonObject;
|
std::optional<json> jsonObject;
|
||||||
|
auto pos = v.determinePos(noPos);
|
||||||
auto attr = attrs->find(state.sStructuredAttrs);
|
auto attr = attrs->find(state.sStructuredAttrs);
|
||||||
if (attr != attrs->end() &&
|
if (attr != attrs->end() &&
|
||||||
state.forceBool(*attr->value, noPos,
|
state.forceBool(*attr->value, pos,
|
||||||
"while evaluating the `__structuredAttrs` "
|
"while evaluating the `__structuredAttrs` "
|
||||||
"attribute passed to builtins.derivationStrict"))
|
"attribute passed to builtins.derivationStrict"))
|
||||||
jsonObject = json::object();
|
jsonObject = json::object();
|
||||||
|
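
The one-line addition of `auto pos = v.determinePos(noPos)` is what lets all the later `noPos` arguments in this function become `pos`: the error machinery can now point at the attribute set that produced the derivation instead of at no position at all. `determinePos` is essentially a fallback lookup, along these lines (miniature only, with `0` playing the role of `noPos`):

```cpp
#include <cassert>
#include <cstdint>

// Miniature of Value::determinePos(fallback): prefer the value's own recorded
// position, otherwise use whatever the caller supplies.
struct MiniValue
{
    uint32_t pos = 0; // 0 stands in for noPos

    uint32_t determinePos(uint32_t fallback) const { return pos ? pos : fallback; }
};

int main()
{
    MiniValue unplaced;        // no recorded position
    MiniValue placed{123};
    assert(unplaced.determinePos(7) == 7);
    assert(placed.determinePos(7) == 123);
}
```
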
@ -1099,7 +1097,7 @@ drvName, Bindings * attrs, Value & v)
|
||||||
bool ignoreNulls = false;
|
bool ignoreNulls = false;
|
||||||
attr = attrs->find(state.sIgnoreNulls);
|
attr = attrs->find(state.sIgnoreNulls);
|
||||||
if (attr != attrs->end())
|
if (attr != attrs->end())
|
||||||
ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");
|
ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");
|
||||||
|
|
||||||
/* Build the derivation expression by processing the attributes. */
|
/* Build the derivation expression by processing the attributes. */
|
||||||
Derivation drv;
|
Derivation drv;
|
||||||
|
@ -1128,37 +1126,33 @@ drvName, Bindings * attrs, Value & v)
|
||||||
experimentalFeatureSettings.require(Xp::DynamicDerivations);
|
experimentalFeatureSettings.require(Xp::DynamicDerivations);
|
||||||
ingestionMethod = TextIngestionMethod {};
|
ingestionMethod = TextIngestionMethod {};
|
||||||
} else
|
} else
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
|
"invalid value '%s' for 'outputHashMode' attribute", s
|
||||||
.errPos = state.positions[noPos]
|
).atPos(v).debugThrow();
|
||||||
}));
|
|
||||||
};
|
};
|
||||||
|
|
||||||
auto handleOutputs = [&](const Strings & ss) {
|
auto handleOutputs = [&](const Strings & ss) {
|
||||||
outputs.clear();
|
outputs.clear();
|
||||||
for (auto & j : ss) {
|
for (auto & j : ss) {
|
||||||
if (outputs.find(j) != outputs.end())
|
if (outputs.find(j) != outputs.end())
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("duplicate derivation output '%1%'", j)
|
||||||
.msg = hintfmt("duplicate derivation output '%1%'", j),
|
.atPos(v)
|
||||||
.errPos = state.positions[noPos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
/* !!! Check whether j is a valid attribute
|
/* !!! Check whether j is a valid attribute
|
||||||
name. */
|
name. */
|
||||||
/* Derivations cannot be named ‘drv’, because
|
/* Derivations cannot be named ‘drv’, because
|
||||||
then we'd have an attribute ‘drvPath’ in
|
then we'd have an attribute ‘drvPath’ in
|
||||||
the resulting set. */
|
the resulting set. */
|
||||||
if (j == "drv")
|
if (j == "drv")
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("invalid derivation output name 'drv'")
|
||||||
.msg = hintfmt("invalid derivation output name 'drv'" ),
|
.atPos(v)
|
||||||
.errPos = state.positions[noPos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
outputs.insert(j);
|
outputs.insert(j);
|
||||||
}
|
}
|
||||||
if (outputs.empty())
|
if (outputs.empty())
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("derivation cannot have an empty set of outputs")
|
||||||
.msg = hintfmt("derivation cannot have an empty set of outputs"),
|
.atPos(v)
|
||||||
.errPos = state.positions[noPos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
};
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -1167,16 +1161,16 @@ drvName, Bindings * attrs, Value & v)
|
||||||
const std::string_view context_below("");
|
const std::string_view context_below("");
|
||||||
|
|
||||||
if (ignoreNulls) {
|
if (ignoreNulls) {
|
||||||
state.forceValue(*i->value, noPos);
|
state.forceValue(*i->value, pos);
|
||||||
if (i->value->type() == nNull) continue;
|
if (i->value->type() == nNull) continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) {
|
if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) {
|
||||||
contentAddressed = true;
|
contentAddressed = true;
|
||||||
experimentalFeatureSettings.require(Xp::CaDerivations);
|
experimentalFeatureSettings.require(Xp::CaDerivations);
|
||||||
}
|
}
|
||||||
|
|
||||||
else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) {
|
else if (i->name == state.sImpure && state.forceBool(*i->value, pos, context_below)) {
|
||||||
isImpure = true;
|
isImpure = true;
|
||||||
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
||||||
}
|
}
|
||||||
|
@ -1184,9 +1178,9 @@ drvName, Bindings * attrs, Value & v)
|
||||||
/* The `args' attribute is special: it supplies the
|
/* The `args' attribute is special: it supplies the
|
||||||
command-line arguments to the builder. */
|
command-line arguments to the builder. */
|
||||||
else if (i->name == state.sArgs) {
|
else if (i->name == state.sArgs) {
|
||||||
state.forceList(*i->value, noPos, context_below);
|
state.forceList(*i->value, pos, context_below);
|
||||||
for (auto elem : i->value->listItems()) {
|
for (auto elem : i->value->listItems()) {
|
||||||
auto s = state.coerceToString(noPos, *elem, context,
|
auto s = state.coerceToString(pos, *elem, context,
|
||||||
"while evaluating an element of the argument list",
|
"while evaluating an element of the argument list",
|
||||||
true).toOwned();
|
true).toOwned();
|
||||||
drv.args.push_back(s);
|
drv.args.push_back(s);
|
||||||
|
@ -1201,29 +1195,29 @@ drvName, Bindings * attrs, Value & v)
|
||||||
|
|
||||||
if (i->name == state.sStructuredAttrs) continue;
|
if (i->name == state.sStructuredAttrs) continue;
|
||||||
|
|
||||||
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context);
|
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context);
|
||||||
|
|
||||||
if (i->name == state.sBuilder)
|
if (i->name == state.sBuilder)
|
||||||
drv.builder = state.forceString(*i->value, context, noPos, context_below);
|
drv.builder = state.forceString(*i->value, context, pos, context_below);
|
||||||
else if (i->name == state.sSystem)
|
else if (i->name == state.sSystem)
|
||||||
drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below);
|
drv.platform = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||||
else if (i->name == state.sOutputHash)
|
else if (i->name == state.sOutputHash)
|
||||||
outputHash = state.forceStringNoCtx(*i->value, noPos, context_below);
|
outputHash = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||||
else if (i->name == state.sOutputHashAlgo)
|
else if (i->name == state.sOutputHashAlgo)
|
||||||
outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below);
|
outputHashAlgo = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||||
else if (i->name == state.sOutputHashMode)
|
else if (i->name == state.sOutputHashMode)
|
||||||
handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below));
|
handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below));
|
||||||
else if (i->name == state.sOutputs) {
|
else if (i->name == state.sOutputs) {
|
||||||
/* Require ‘outputs’ to be a list of strings. */
|
/* Require ‘outputs’ to be a list of strings. */
|
||||||
state.forceList(*i->value, noPos, context_below);
|
state.forceList(*i->value, pos, context_below);
|
||||||
Strings ss;
|
Strings ss;
|
||||||
for (auto elem : i->value->listItems())
|
for (auto elem : i->value->listItems())
|
||||||
ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below));
|
ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below));
|
||||||
handleOutputs(ss);
|
handleOutputs(ss);
|
||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned();
|
auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned();
|
||||||
drv.env.emplace(key, s);
|
drv.env.emplace(key, s);
|
||||||
if (i->name == state.sBuilder) drv.builder = std::move(s);
|
if (i->name == state.sBuilder) drv.builder = std::move(s);
|
||||||
else if (i->name == state.sSystem) drv.platform = std::move(s);
|
else if (i->name == state.sSystem) drv.platform = std::move(s);
|
||||||
|
@ -1238,7 +1232,7 @@ drvName, Bindings * attrs, Value & v)
|
||||||
|
|
||||||
} catch (Error & e) {
|
} catch (Error & e) {
|
||||||
e.addTrace(state.positions[i->pos],
|
e.addTrace(state.positions[i->pos],
|
||||||
hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
|
HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
|
||||||
true);
|
true);
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
|
@ -1281,16 +1275,14 @@ drvName, Bindings * attrs, Value & v)
|
||||||
|
|
||||||
/* Do we have all required attributes? */
|
/* Do we have all required attributes? */
|
||||||
if (drv.builder == "")
|
if (drv.builder == "")
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("required attribute 'builder' missing")
|
||||||
.msg = hintfmt("required attribute 'builder' missing"),
|
.atPos(v)
|
||||||
.errPos = state.positions[noPos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
|
|
||||||
if (drv.platform == "")
|
if (drv.platform == "")
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("required attribute 'system' missing")
|
||||||
.msg = hintfmt("required attribute 'system' missing"),
|
.atPos(v)
|
||||||
.errPos = state.positions[noPos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
|
|
||||||
/* Check whether the derivation name is valid. */
|
/* Check whether the derivation name is valid. */
|
||||||
if (isDerivation(drvName) &&
|
if (isDerivation(drvName) &&
|
||||||
|
@ -1298,10 +1290,10 @@ drvName, Bindings * attrs, Value & v)
|
||||||
outputs.size() == 1 &&
|
outputs.size() == 1 &&
|
||||||
*(outputs.begin()) == "out"))
|
*(outputs.begin()) == "out"))
|
||||||
{
|
{
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension),
|
"derivation names are allowed to end in '%s' only if they produce a single derivation file",
|
||||||
.errPos = state.positions[noPos]
|
drvExtension
|
||||||
}));
|
).atPos(v).debugThrow();
|
||||||
}
|
}
|
||||||
|
|
||||||
if (outputHash) {
|
if (outputHash) {
|
||||||
|
@ -1310,10 +1302,9 @@ drvName, Bindings * attrs, Value & v)
|
||||||
Ignore `__contentAddressed` because fixed output derivations are
|
Ignore `__contentAddressed` because fixed output derivations are
|
||||||
already content addressed. */
|
already content addressed. */
|
||||||
if (outputs.size() != 1 || *(outputs.begin()) != "out")
|
if (outputs.size() != 1 || *(outputs.begin()) != "out")
|
||||||
state.debugThrowLastTrace(Error({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
|
"multiple outputs are not supported in fixed-output derivations"
|
||||||
.errPos = state.positions[noPos]
|
).atPos(v).debugThrow();
|
||||||
}));
|
|
||||||
|
|
||||||
auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));
|
auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));
|
||||||
|
|
||||||
|
@ -1332,10 +1323,8 @@ drvName, Bindings * attrs, Value & v)
|
||||||
|
|
||||||
else if (contentAddressed || isImpure) {
|
else if (contentAddressed || isImpure) {
|
||||||
if (contentAddressed && isImpure)
|
if (contentAddressed && isImpure)
|
||||||
throw EvalError({
|
state.error<EvalError>("derivation cannot be both content-addressed and impure")
|
||||||
.msg = hintfmt("derivation cannot be both content-addressed and impure"),
|
.atPos(v).debugThrow();
|
||||||
.errPos = state.positions[noPos]
|
|
||||||
});
|
|
||||||
|
|
||||||
auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
|
auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
|
||||||
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
|
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
|
||||||
|
@ -1376,10 +1365,10 @@ drvName, Bindings * attrs, Value & v)
|
||||||
for (auto & i : outputs) {
|
for (auto & i : outputs) {
|
||||||
auto h = get(hashModulo.hashes, i);
|
auto h = get(hashModulo.hashes, i);
|
||||||
if (!h)
|
if (!h)
|
||||||
throw AssertionError({
|
state.error<AssertionError>(
|
||||||
.msg = hintfmt("derivation produced no hash for output '%s'", i),
|
"derivation produced no hash for output '%s'",
|
||||||
.errPos = state.positions[noPos],
|
i
|
||||||
});
|
).atPos(v).debugThrow();
|
||||||
auto outPath = state.store->makeOutputPath(i, *h, drvName);
|
auto outPath = state.store->makeOutputPath(i, *h, drvName);
|
||||||
drv.env[i] = state.store->printStorePath(outPath);
|
drv.env[i] = state.store->printStorePath(outPath);
|
||||||
drv.outputs.insert_or_assign(
|
drv.outputs.insert_or_assign(
|
||||||
|
@ -1485,10 +1474,10 @@ static RegisterPrimOp primop_toPath({
|
||||||
static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
{
|
{
|
||||||
if (evalSettings.pureEval)
|
if (evalSettings.pureEval)
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"),
|
"'%s' is not allowed in pure evaluation mode",
|
||||||
.errPos = state.positions[pos]
|
"builtins.storePath"
|
||||||
}));
|
).atPos(pos).debugThrow();
|
||||||
|
|
||||||
NixStringContext context;
|
NixStringContext context;
|
||||||
auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
|
auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
|
||||||
|
@ -1498,10 +1487,8 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
|
||||||
if (!state.store->isStorePath(path.abs()))
|
if (!state.store->isStorePath(path.abs()))
|
||||||
path = CanonPath(canonPath(path.abs(), true));
|
path = CanonPath(canonPath(path.abs(), true));
|
||||||
if (!state.store->isInStore(path.abs()))
|
if (!state.store->isInStore(path.abs()))
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("path '%1%' is not in the Nix store", path)
|
||||||
.msg = hintfmt("path '%1%' is not in the Nix store", path),
|
.atPos(pos).debugThrow();
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
auto path2 = state.store->toStorePath(path.abs()).first;
|
auto path2 = state.store->toStorePath(path.abs()).first;
|
||||||
if (!settings.readOnlyMode)
|
if (!settings.readOnlyMode)
|
||||||
state.store->ensurePath(path2);
|
state.store->ensurePath(path2);
|
||||||
|
@ -1616,7 +1603,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||||
auto path = realisePath(state, pos, *args[0]);
|
auto path = realisePath(state, pos, *args[0]);
|
||||||
auto s = path.readFile();
|
auto s = path.readFile();
|
||||||
if (s.find((char) 0) != std::string::npos)
|
if (s.find((char) 0) != std::string::npos)
|
||||||
state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path));
|
state.error<EvalError>(
|
||||||
|
"the contents of the file '%1%' cannot be represented as a Nix string",
|
||||||
|
path
|
||||||
|
).atPos(pos).debugThrow();
|
||||||
StorePathSet refs;
|
StorePathSet refs;
|
||||||
if (state.store->isInStore(path.path.abs())) {
|
if (state.store->isInStore(path.path.abs())) {
|
||||||
try {
|
try {
|
||||||
|
@ -1673,10 +1663,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||||
auto rewrites = state.realiseContext(context);
|
auto rewrites = state.realiseContext(context);
|
||||||
path = rewriteStrings(path, rewrites);
|
path = rewriteStrings(path, rewrites);
|
||||||
} catch (InvalidPathError & e) {
|
} catch (InvalidPathError & e) {
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
|
"cannot find '%1%', since path '%2%' is not valid",
|
||||||
.errPos = state.positions[pos]
|
path,
|
||||||
}));
|
e.path
|
||||||
|
).atPos(pos).debugThrow();
|
||||||
}
|
}
|
||||||
|
|
||||||
searchPath.elements.emplace_back(SearchPath::Elem {
|
searchPath.elements.emplace_back(SearchPath::Elem {
|
||||||
|
@ -1745,10 +1736,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
|
||||||
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
|
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
|
||||||
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
|
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
|
||||||
if (!ha)
|
if (!ha)
|
||||||
state.debugThrowLastTrace(Error({
|
state.error<EvalError>("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow();
|
||||||
.msg = hintfmt("unknown hash algo '%1%'", algo),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
|
|
||||||
auto path = realisePath(state, pos, *args[1]);
|
auto path = realisePath(state, pos, *args[1]);
|
||||||
|
|
||||||
|
@ -1816,7 +1804,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||||
// detailed node info quickly in this case we produce a thunk to
|
// detailed node info quickly in this case we produce a thunk to
|
||||||
// query the file type lazily.
|
// query the file type lazily.
|
||||||
auto epath = state.allocValue();
|
auto epath = state.allocValue();
|
||||||
epath->mkPath(path + name);
|
epath->mkPath(path / name);
|
||||||
if (!readFileType)
|
if (!readFileType)
|
||||||
readFileType = &state.getBuiltin("readFileType");
|
readFileType = &state.getBuiltin("readFileType");
|
||||||
attr.mkApp(readFileType, epath);
|
attr.mkApp(readFileType, epath);
|
||||||
|
@ -1878,7 +1866,7 @@ static RegisterPrimOp primop_outputOf({
|
||||||
For instance,
|
For instance,
|
||||||
```nix
|
```nix
|
||||||
builtins.outputOf
|
builtins.outputOf
|
||||||
(builtins.outputOf myDrv "out)
|
(builtins.outputOf myDrv "out")
|
||||||
"out"
|
"out"
|
||||||
```
|
```
|
||||||
will return a placeholder for the output of the output of `myDrv`.
|
will return a placeholder for the output of the output of `myDrv`.
|
||||||
|
@ -2068,13 +2056,12 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
|
||||||
if (auto p = std::get_if<NixStringContextElem::Opaque>(&c.raw))
|
if (auto p = std::get_if<NixStringContextElem::Opaque>(&c.raw))
|
||||||
refs.insert(p->path);
|
refs.insert(p->path);
|
||||||
else
|
else
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt(
|
"files created by %1% may not reference derivations, but %2% references %3%",
|
||||||
"in 'toFile': the file named '%1%' must not contain a reference "
|
"builtins.toFile",
|
||||||
"to a derivation but contains (%2%)",
|
name,
|
||||||
name, c.to_string()),
|
c.to_string()
|
||||||
.errPos = state.positions[pos]
|
).atPos(pos).debugThrow();
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
auto storePath = settings.readOnlyMode
|
auto storePath = settings.readOnlyMode
|
||||||
|
@ -2241,9 +2228,12 @@ static void addPath(
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||||
auto dstPath = fetchToStore(*state.store, path, name, method, filter.get(), state.repair);
|
auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair);
|
||||||
if (expectedHash && expectedStorePath != dstPath)
|
if (expectedHash && expectedStorePath != dstPath)
|
||||||
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
|
state.error<EvalError>(
|
||||||
|
"store path mismatch in (possibly filtered) path added from '%s'",
|
||||||
|
path
|
||||||
|
).atPos(pos).debugThrow();
|
||||||
state.allowAndSetStorePathString(dstPath, v);
|
state.allowAndSetStorePathString(dstPath, v);
|
||||||
} else
|
} else
|
||||||
state.allowAndSetStorePathString(*expectedStorePath, v);
|
state.allowAndSetStorePathString(*expectedStorePath, v);
|
||||||
|
@ -2343,16 +2333,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
|
||||||
else if (n == "sha256")
|
else if (n == "sha256")
|
||||||
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
|
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
|
||||||
else
|
else
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
|
"unsupported argument '%1%' to 'addPath'",
|
||||||
.errPos = state.positions[attr.pos]
|
state.symbols[attr.name]
|
||||||
}));
|
).atPos(attr.pos).debugThrow();
|
||||||
}
|
}
|
||||||
if (!path)
|
if (!path)
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"),
|
"missing required 'path' attribute in the first argument to builtins.path"
|
||||||
.errPos = state.positions[pos]
|
).atPos(pos).debugThrow();
|
||||||
}));
|
|
||||||
if (name.empty())
|
if (name.empty())
|
||||||
name = path->baseName();
|
name = path->baseName();
|
||||||
|
|
||||||
|
@ -2770,10 +2759,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (!args[0]->isLambda())
|
if (!args[0]->isLambda())
|
||||||
state.debugThrowLastTrace(TypeError({
|
state.error<TypeError>("'functionArgs' requires a function").atPos(pos).debugThrow();
|
||||||
.msg = hintfmt("'functionArgs' requires a function"),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
|
|
||||||
if (!args[0]->lambda.fun->hasFormals()) {
|
if (!args[0]->lambda.fun->hasFormals()) {
|
||||||
v.mkAttrs(&state.emptyBindings);
|
v.mkAttrs(&state.emptyBindings);
|
||||||
|
@ -2943,10 +2929,10 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val
|
||||||
{
|
{
|
||||||
state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt");
|
state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt");
|
||||||
if (n < 0 || (unsigned int) n >= list.listSize())
|
if (n < 0 || (unsigned int) n >= list.listSize())
|
||||||
state.debugThrowLastTrace(Error({
|
state.error<EvalError>(
|
||||||
.msg = hintfmt("list index %1% is out of bounds", n),
|
"list index %1% is out of bounds",
|
||||||
.errPos = state.positions[pos]
|
n
|
||||||
}));
|
).atPos(pos).debugThrow();
|
||||||
state.forceValue(*list.listElems()[n], pos);
|
state.forceValue(*list.listElems()[n], pos);
|
||||||
v = *list.listElems()[n];
|
v = *list.listElems()[n];
|
||||||
}
|
}
|
||||||
|
@ -2991,10 +2977,7 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value
|
||||||
{
|
{
|
||||||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail");
|
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail");
|
||||||
if (args[0]->listSize() == 0)
|
if (args[0]->listSize() == 0)
|
||||||
state.debugThrowLastTrace(Error({
|
state.error<EvalError>("'tail' called on an empty list").atPos(pos).debugThrow();
|
||||||
.msg = hintfmt("'tail' called on an empty list"),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
|
|
||||||
state.mkList(v, args[0]->listSize() - 1);
|
state.mkList(v, args[0]->listSize() - 1);
|
||||||
for (unsigned int n = 0; n < v.listSize(); ++n)
|
for (unsigned int n = 0; n < v.listSize(); ++n)
|
||||||
|
@ -3251,7 +3234,7 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
|
||||||
auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");
|
auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");
|
||||||
|
|
||||||
if (len < 0)
|
if (len < 0)
|
||||||
state.error("cannot create list of size %1%", len).debugThrow<EvalError>();
|
state.error<EvalError>("cannot create list of size %1%", len).atPos(pos).debugThrow();
|
||||||
|
|
||||||
// More strict than strictly (!) necessary, but acceptable
|
// More strict than strictly (!) necessary, but acceptable
|
||||||
// as evaluating map without accessing any values makes little sense.
|
// as evaluating map without accessing any values makes little sense.
|
||||||
|
@ -3568,10 +3551,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
|
||||||
|
|
||||||
NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division");
|
NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division");
|
||||||
if (f2 == 0)
|
if (f2 == 0)
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("division by zero").atPos(pos).debugThrow();
|
||||||
.msg = hintfmt("division by zero"),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
|
|
||||||
if (args[0]->type() == nFloat || args[1]->type() == nFloat) {
|
if (args[0]->type() == nFloat || args[1]->type() == nFloat) {
|
||||||
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2);
|
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2);
|
||||||
|
@ -3580,10 +3560,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
|
||||||
NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division");
|
NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division");
|
||||||
/* Avoid division overflow as it might raise SIGFPE. */
|
/* Avoid division overflow as it might raise SIGFPE. */
|
||||||
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
|
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("overflow in integer division").atPos(pos).debugThrow();
|
||||||
.msg = hintfmt("overflow in integer division"),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
|
|
||||||
v.mkInt(i1 / i2);
|
v.mkInt(i1 / i2);
|
||||||
}
|
}
|
||||||
|
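
Together with the division-by-zero check in the previous hunk, the guard kept here exists because `INT64_MIN / -1` does not fit back into a signed 64-bit integer; on common platforms that overflow surfaces as SIGFPE rather than anything catchable, so it has to be rejected before dividing. The same two checks in isolation, assuming `NixInt` is a 64-bit signed integer:

```cpp
#include <cstdint>
#include <limits>
#include <stdexcept>

// Reject the two cases where a signed 64-bit division cannot produce a result.
int64_t safeDiv(int64_t a, int64_t b)
{
    if (b == 0)
        throw std::runtime_error("division by zero");
    if (a == std::numeric_limits<int64_t>::min() && b == -1)
        throw std::runtime_error("overflow in integer division"); // would otherwise trap as SIGFPE
    return a / b;
}
```
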
@ -3714,10 +3691,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args,
|
||||||
int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring");
|
int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring");
|
||||||
|
|
||||||
if (start < 0)
|
if (start < 0)
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("negative start position in 'substring'").atPos(pos).debugThrow();
|
||||||
.msg = hintfmt("negative start position in 'substring'"),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
|
|
||||||
|
|
||||||
int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");
|
int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");
|
||||||
|
@ -3782,10 +3756,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
|
||||||
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
|
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
|
||||||
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
|
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
|
||||||
if (!ha)
|
if (!ha)
|
||||||
state.debugThrowLastTrace(Error({
|
state.error<EvalError>("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow();
|
||||||
.msg = hintfmt("unknown hash algo '%1%'", algo),
|
|
||||||
.errPos = state.positions[pos]
|
|
||||||
}));
|
|
||||||
|
|
||||||
NixStringContext context; // discarded
|
NixStringContext context; // discarded
|
||||||
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
|
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
|
||||||
|
@ -3951,15 +3922,13 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
} catch (std::regex_error & e) {
|
} catch (std::regex_error & e) {
|
||||||
if (e.code() == std::regex_constants::error_space) {
|
if (e.code() == std::regex_constants::error_space) {
|
||||||
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
|
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("memory limit exceeded by regular expression '%s'", re)
|
||||||
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
|
.atPos(pos)
|
||||||
.errPos = state.positions[pos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
} else
|
} else
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("invalid regular expression '%s'", re)
|
||||||
.msg = hintfmt("invalid regular expression '%s'", re),
|
.atPos(pos)
|
||||||
.errPos = state.positions[pos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
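
Both `match` here and `split` below special-case `std::regex_constants::error_space`, which libstdc++ reports when a pattern exceeds `_GLIBCXX_REGEX_STATE_LIMIT`, so the user gets a "memory limit exceeded" message instead of the generic invalid-regex one. The catch logic in isolation (whether this particular pattern actually trips `error_space` depends on the standard library; it only illustrates the dispatch):

```cpp
#include <cstdio>
#include <regex>

int main()
{
    try {
        std::regex re("(a{1,1000}){1,1000}"); // deliberately state-hungry pattern
        (void) re;
    } catch (const std::regex_error & e) {
        if (e.code() == std::regex_constants::error_space)
            std::puts("memory limit exceeded by regular expression");
        else
            std::puts("invalid regular expression");
    }
}
```
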
@ -4055,15 +4024,13 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||||
} catch (std::regex_error & e) {
|
} catch (std::regex_error & e) {
|
||||||
if (e.code() == std::regex_constants::error_space) {
|
if (e.code() == std::regex_constants::error_space) {
|
||||||
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
|
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("memory limit exceeded by regular expression '%s'", re)
|
||||||
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
|
.atPos(pos)
|
||||||
.errPos = state.positions[pos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
} else
|
} else
|
||||||
state.debugThrowLastTrace(EvalError({
|
state.error<EvalError>("invalid regular expression '%s'", re)
|
||||||
.msg = hintfmt("invalid regular expression '%s'", re),
|
.atPos(pos)
|
||||||
.errPos = state.positions[pos]
|
.debugThrow();
|
||||||
}));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4139,7 +4106,9 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
|
||||||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings");
|
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings");
|
||||||
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings");
|
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings");
|
||||||
if (args[0]->listSize() != args[1]->listSize())
|
if (args[0]->listSize() != args[1]->listSize())
|
||||||
state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow<EvalError>();
|
state.error<EvalError>(
|
||||||
|
"'from' and 'to' arguments passed to builtins.replaceStrings have different lengths"
|
||||||
|
).atPos(pos).debugThrow();
|
||||||
|
|
||||||
std::vector<std::string> from;
|
std::vector<std::string> from;
|
||||||
from.reserve(args[0]->listSize());
|
from.reserve(args[0]->listSize());
|
||||||
|
|
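The hunks above all apply the same mechanical rewrite: a braced ErrorInfo wrapped in debugThrowLastTrace (or a bare throw) becomes one fluent call on EvalState. A minimal sketch of how a primop might report an error with the new API follows; the primop name, message and arguments are placeholders, but the error<EvalError>(...).atPos(pos).debugThrow() chain is exactly the form introduced in this diff.

    // Hypothetical primop body; `state`, `pos` and `args` are the usual
    // primop parameters, everything else is illustrative only.
    static void prim_example(EvalState & state, const PosIdx pos, Value * * args, Value & v)
    {
        auto n = state.forceInt(*args[0], pos, "while evaluating the argument passed to builtins.example");
        if (n < 0)
            state.error<EvalError>("expected a non-negative integer, got %1%", n)
                .atPos(pos)
                .debugThrow();
        v.mkInt(n);
    }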
@@ -98,30 +98,30 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V

     auto contextSize = context.size();
     if (contextSize != 1) {
-        throw EvalError({
-            .msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize),
-            .errPos = state.positions[pos]
-        });
+        state.error<EvalError>(
+            "context of string '%s' must have exactly one element, but has %d",
+            *s,
+            contextSize
+        ).atPos(pos).debugThrow();
     }
     NixStringContext context2 {
         (NixStringContextElem { std::visit(overloaded {
             [&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep {
                 if (!c.path.isDerivation()) {
-                    throw EvalError({
-                        .msg = hintfmt("path '%s' is not a derivation",
-                            state.store->printStorePath(c.path)),
-                        .errPos = state.positions[pos],
-                    });
+                    state.error<EvalError>(
+                        "path '%s' is not a derivation",
+                        state.store->printStorePath(c.path)
+                    ).atPos(pos).debugThrow();
                 }
                 return NixStringContextElem::DrvDeep {
                     .drvPath = c.path,
                 };
             },
             [&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep {
-                throw EvalError({
-                    .msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output),
-                    .errPos = state.positions[pos],
-                });
+                state.error<EvalError>(
+                    "`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'",
+                    c.output
+                ).atPos(pos).debugThrow();
             },
             [&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
                 /* Reuse original item because we want this to be idempotent. */

@@ -261,10 +261,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
     for (auto & i : *args[1]->attrs) {
         const auto & name = state.symbols[i.name];
         if (!state.store->isStorePath(name))
-            throw EvalError({
-                .msg = hintfmt("context key '%s' is not a store path", name),
-                .errPos = state.positions[i.pos]
-            });
+            state.error<EvalError>(
+                "context key '%s' is not a store path",
+                name
+            ).atPos(i.pos).debugThrow();
         auto namePath = state.store->parseStorePath(name);
         if (!settings.readOnlyMode)
             state.store->ensurePath(namePath);

@@ -281,10 +281,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
         if (iter != i.value->attrs->end()) {
             if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) {
                 if (!isDerivation(name)) {
-                    throw EvalError({
-                        .msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name),
-                        .errPos = state.positions[i.pos]
-                    });
+                    state.error<EvalError>(
+                        "tried to add all-outputs context of %s, which is not a derivation, to a string",
+                        name
+                    ).atPos(i.pos).debugThrow();
                 }
                 context.emplace(NixStringContextElem::DrvDeep {
                     .drvPath = namePath,

@@ -296,10 +296,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
         if (iter != i.value->attrs->end()) {
             state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context");
             if (iter->value->listSize() && !isDerivation(name)) {
-                throw EvalError({
-                    .msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name),
-                    .errPos = state.positions[i.pos]
-                });
+                state.error<EvalError>(
+                    "tried to add derivation output context of %s, which is not a derivation, to a string",
+                    name
+                ).atPos(i.pos).debugThrow();
             }
             for (auto elem : iter->value->listItems()) {
                 auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
@@ -23,20 +23,20 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
     auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath);
     if (toPathMaybe && *toPathMaybe != rewrittenPath)
         throw Error({
-            .msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
+            .msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
                 state.store->printStorePath(fromPath),
                 state.store->printStorePath(rewrittenPath),
                 state.store->printStorePath(*toPathMaybe)),
-            .errPos = state.positions[pos]
+            .pos = state.positions[pos]
         });
     if (!toPathMaybe)
         throw Error({
-            .msg = hintfmt(
+            .msg = HintFmt(
                 "rewriting '%s' to content-addressed form yielded '%s'\n"
                 "Use this value for the 'toPath' attribute passed to 'fetchClosure'",
                 state.store->printStorePath(fromPath),
                 state.store->printStorePath(rewrittenPath)),
-            .errPos = state.positions[pos]
+            .pos = state.positions[pos]
         });
 }

@@ -50,11 +50,11 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
         // We don't perform the rewriting when outPath already exists, as an optimisation.
         // However, we can quickly detect a mistake if the toPath is input addressed.
         throw Error({
-            .msg = hintfmt(
+            .msg = HintFmt(
                 "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n"
                 "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.",
                 state.store->printStorePath(toPath)),
-            .errPos = state.positions[pos]
+            .pos = state.positions[pos]
         });
     }

@@ -73,14 +73,14 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos

     if (!info->isContentAddressed(*state.store)) {
         throw Error({
-            .msg = hintfmt(
+            .msg = HintFmt(
                 "The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n"
                 "If you do intend to fetch an input-addressed store path, add\n\n"
                 " inputAddressed = true;\n\n"
                 "to the 'fetchClosure' arguments.\n\n"
                 "Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.",
                 state.store->printStorePath(fromPath)),
-            .errPos = state.positions[pos]
+            .pos = state.positions[pos]
         });
     }

@@ -99,11 +99,11 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId

     if (info->isContentAddressed(*state.store)) {
         throw Error({
-            .msg = hintfmt(
+            .msg = HintFmt(
                 "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n"
                 "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed",
                 state.store->printStorePath(fromPath)),
-            .errPos = state.positions[pos]
+            .pos = state.positions[pos]
         });
     }

@@ -153,15 +153,15 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg

         else
             throw Error({
-                .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
-                .errPos = state.positions[pos]
+                .msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
+                .pos = state.positions[pos]
             });
     }

     if (!fromPath)
         throw Error({
-            .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
-            .errPos = state.positions[pos]
+            .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
+            .pos = state.positions[pos]
         });

     bool inputAddressed = inputAddressedMaybe.value_or(false);

@@ -169,17 +169,17 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
     if (inputAddressed) {
         if (toPath)
             throw Error({
-                .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
+                .msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
                     "inputAddressed",
                     "toPath"),
-                .errPos = state.positions[pos]
+                .pos = state.positions[pos]
             });
     }

     if (!fromStoreUrl)
         throw Error({
-            .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
-            .errPos = state.positions[pos]
+            .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
+            .pos = state.positions[pos]
         });

     auto parsedURL = parseURL(*fromStoreUrl);

@@ -188,14 +188,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
         parsedURL.scheme != "https" &&
         !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file"))
         throw Error({
-            .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"),
-            .errPos = state.positions[pos]
+            .msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"),
+            .pos = state.positions[pos]
         });

     if (!parsedURL.query.empty())
         throw Error({
-            .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
-            .errPos = state.positions[pos]
+            .msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
+            .pos = state.positions[pos]
         });

     auto fromStore = openStore(parsedURL.to_string());
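For errors that are still thrown as a plain Error with an ErrorInfo initializer, as in the fetchClosure hunks above, the change is only a rename: hintfmt becomes HintFmt and errPos becomes pos. A sketch of the new spelling, with a made-up message and assuming state and pos are in scope:

    throw Error({
        .msg = HintFmt("unsupported store URL '%s'", url),   // url is a placeholder
        .pos = state.positions[pos]
    });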
@@ -38,17 +38,11 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
         else if (n == "name")
             name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial");
         else
-            throw EvalError({
-                .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]),
-                .errPos = state.positions[attr.pos]
-            });
+            state.error<EvalError>("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow();
     }

     if (url.empty())
-        throw EvalError({
-            .msg = hintfmt("'url' argument required"),
-            .errPos = state.positions[pos]
-        });
+        state.error<EvalError>("'url' argument required").atPos(pos).debugThrow();

     } else
         url = state.coerceToString(pos, *args[0], context,
@@ -100,16 +100,14 @@ static void fetchTree(

     if (auto aType = args[0]->attrs->get(state.sType)) {
         if (type)
-            state.debugThrowLastTrace(EvalError({
-                .msg = hintfmt("unexpected attribute 'type'"),
-                .errPos = state.positions[pos]
-            }));
+            state.error<EvalError>(
+                "unexpected attribute 'type'"
+            ).atPos(pos).debugThrow();
         type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree");
     } else if (!type)
-        state.debugThrowLastTrace(EvalError({
-            .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
-            .errPos = state.positions[pos]
-        }));
+        state.error<EvalError>(
+            "attribute 'type' is missing in call to 'fetchTree'"
+        ).atPos(pos).debugThrow();

     attrs.emplace("type", type.value());

@@ -132,8 +130,8 @@ static void fetchTree(
             attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
         }
         else
-            state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
-                state.symbols[attr.name], showType(*attr.value)));
+            state.error<TypeError>("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
+                state.symbols[attr.name], showType(*attr.value)).debugThrow();
     }

     if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {

@@ -142,10 +140,9 @@ static void fetchTree(

         if (!params.allowNameArgument)
             if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
-                state.debugThrowLastTrace(EvalError({
-                    .msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"),
-                    .errPos = state.positions[pos]
-                }));
+                state.error<EvalError>(
+                    "attribute 'name' isn’t supported in call to 'fetchTree'"
+                ).atPos(pos).debugThrow();

         input = fetchers::Input::fromAttrs(std::move(attrs));
     } else {

@@ -163,10 +160,9 @@ static void fetchTree(
             input = fetchers::Input::fromAttrs(std::move(attrs));
         } else {
             if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
-                state.debugThrowLastTrace(EvalError({
-                    .msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"),
-                    .errPos = state.positions[pos]
-                }));
+                state.error<EvalError>(
+                    "passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"
+                ).atPos(pos).debugThrow();
             input = fetchers::Input::fromURL(url);
         }
     }

@@ -174,8 +170,16 @@ static void fetchTree(
     if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
         input = lookupInRegistries(state.store, input).first;

-    if (evalSettings.pureEval && !input.isLocked())
-        state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
+    if (evalSettings.pureEval && !input.isLocked()) {
+        auto fetcher = "fetchTree";
+        if (params.isFetchGit)
+            fetcher = "fetchGit";
+
+        state.error<EvalError>(
+            "in pure evaluation mode, %s requires a locked input",
+            fetcher
+        ).atPos(pos).debugThrow();
+    }

     state.checkURI(input.toURLString());

@@ -428,17 +432,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
             else if (n == "name")
                 name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
             else
-                state.debugThrowLastTrace(EvalError({
-                    .msg = hintfmt("unsupported argument '%s' to '%s'", n, who),
-                    .errPos = state.positions[attr.pos]
-                }));
+                state.error<EvalError>("unsupported argument '%s' to '%s'", n, who)
+                    .atPos(pos).debugThrow();
         }

         if (!url)
-            state.debugThrowLastTrace(EvalError({
-                .msg = hintfmt("'url' argument required"),
-                .errPos = state.positions[pos]
-            }));
+            state.error<EvalError>(
+                "'url' argument required").atPos(pos).debugThrow();
     } else
         url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");

@@ -451,7 +451,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
         name = baseNameOf(*url);

     if (evalSettings.pureEval && !expectedHash)
-        state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
+        state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow();

     // early exit if pinned and already in the store
     if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {

@@ -480,9 +480,15 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
         auto hash = unpack
             ? state.store->queryPathInfo(storePath)->narHash
             : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
-        if (hash != *expectedHash)
-            state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
-                *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
+        if (hash != *expectedHash) {
+            state.error<EvalError>(
+                "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
+                *url,
+                expectedHash->to_string(HashFormat::Nix32, true),
+                hash.to_string(HashFormat::Nix32, true)
+            ).withExitStatus(102)
+            .debugThrow();
+        }
     }

     state.allowAndSetStorePathString(storePath, v);
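The hash-mismatch case above also shows that the fluent builder can carry a custom exit status. A short sketch of that usage, with a placeholder message and name:

    // 102 is the exit status the hash-mismatch path above attaches.
    state.error<EvalError>("hash mismatch in downloaded file '%s'", name)   // name is a placeholder
        .withExitStatus(102)
        .debugThrow();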
@@ -610,8 +616,7 @@ static RegisterPrimOp primop_fetchGit({

       - `shallow` (default: `false`)

-        A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed.
-        This still performs a full clone of what is available on the remote.
+        Make a shallow clone when fetching the Git tree.

       - `allRefs`

@@ -83,10 +83,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
     try {
         visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */));
     } catch (std::exception & e) { // TODO: toml::syntax_error
-        throw EvalError({
-            .msg = hintfmt("while parsing a TOML string: %s", e.what()),
-            .errPos = state.positions[pos]
-        });
+        state.error<EvalError>("while parsing TOML: %s", e.what()).atPos(pos).debugThrow();
     }
 }

@@ -1,6 +1,7 @@
 #include "print-ambiguous.hh"
 #include "print.hh"
 #include "signals.hh"
+#include "eval.hh"

 namespace nix {

@@ -36,11 +36,17 @@ struct PrintOptions
      */
     size_t maxDepth = std::numeric_limits<size_t>::max();
     /**
-     * Maximum number of attributes in an attribute set to print.
+     * Maximum number of attributes in attribute sets to print.
+     *
+     * Note that this is a limit for the entire print invocation, not for each
+     * attribute set encountered.
      */
     size_t maxAttrs = std::numeric_limits<size_t>::max();
     /**
      * Maximum number of list items to print.
+     *
+     * Note that this is a limit for the entire print invocation, not for each
+     * list encountered.
      */
     size_t maxListItems = std::numeric_limits<size_t>::max();
     /**

@@ -49,4 +55,16 @@ struct PrintOptions
     size_t maxStringLength = std::numeric_limits<size_t>::max();
 };

+/**
+ * `PrintOptions` for unknown and therefore potentially large values in error messages,
+ * to avoid printing "too much" output.
+ */
+static PrintOptions errorPrintOptions = PrintOptions {
+    .ansiColors = true,
+    .maxDepth = 10,
+    .maxAttrs = 10,
+    .maxListItems = 10,
+    .maxStringLength = 1024
+};
+
 }
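errorPrintOptions above is just a PrintOptions instance with tight limits, and callers can build their own the same way. A sketch with arbitrary limits, using the same designated-initializer order the struct shows above:

    PrintOptions shallowOptions {
        .ansiColors = false,
        .maxDepth = 2,
        .maxAttrs = 16,
        .maxListItems = 16,
        .maxStringLength = 256
    };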
@@ -7,6 +7,7 @@
 #include "store-api.hh"
 #include "terminal.hh"
 #include "english.hh"
+#include "eval.hh"

 namespace nix {

@@ -19,7 +20,7 @@ void printElided(
 {
     if (ansiColors)
         output << ANSI_FAINT;
-    output << " «";
+    output << "«";
     pluralize(output, value, single, plural);
     output << " elided»";
     if (ansiColors)

@@ -36,7 +37,7 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max
     str << "\"";
     for (auto i = string.begin(); i != string.end(); ++i) {
         if (charsPrinted >= maxLength) {
-            str << "\"";
+            str << "\" ";
             printElided(str, string.length() - charsPrinted, "byte", "bytes", ansiColors);
             return str;
         }

@@ -151,7 +152,7 @@ struct ImportantFirstAttrNameCmp
     }
 };

-typedef std::set<Value *> ValuesSeen;
+typedef std::set<const void *> ValuesSeen;

 class Printer
 {

@@ -160,6 +161,8 @@ private:
     EvalState & state;
     PrintOptions options;
     std::optional<ValuesSeen> seen;
+    size_t attrsPrinted = 0;
+    size_t listItemsPrinted = 0;

     void printRepeated()
     {

@@ -252,14 +255,14 @@ private:
             output << "»";
             if (options.ansiColors)
                 output << ANSI_NORMAL;
-        } catch (BaseError & e) {
+        } catch (Error & e) {
             printError_(e);
         }
     }

     void printAttrs(Value & v, size_t depth)
     {
-        if (seen && !seen->insert(&v).second) {
+        if (seen && !seen->insert(v.attrs).second) {
             printRepeated();
             return;
         }

@@ -278,7 +281,6 @@ private:
             else
                 std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp());

-            size_t attrsPrinted = 0;
             for (auto & i : sorted) {
                 if (attrsPrinted >= options.maxAttrs) {
                     printElided(sorted.size() - attrsPrinted, "attribute", "attributes");

@@ -306,7 +308,6 @@ private:

         output << "[ ";
         if (depth < options.maxDepth) {
-            size_t listItemsPrinted = 0;
             for (auto elem : v.listItems()) {
                 if (listItemsPrinted >= options.maxListItems) {
                     printElided(v.listSize() - listItemsPrinted, "item", "items");

@@ -404,11 +405,11 @@ private:
             output << ANSI_NORMAL;
     }

-    void printError_(BaseError & e)
+    void printError_(Error & e)
     {
         if (options.ansiColors)
             output << ANSI_RED;
-        output << "«" << e.msg() << "»";
+        output << "«error: " << filterANSIEscapes(e.info().msg.str(), true) << "»";
         if (options.ansiColors)
             output << ANSI_NORMAL;
     }

@@ -421,7 +422,7 @@ private:
         if (options.force) {
             try {
                 state.forceValue(v, v.determinePos(noPos));
-            } catch (BaseError & e) {
+            } catch (Error & e) {
                 printError_(e);
                 return;
             }

@@ -485,6 +486,9 @@ public:

     void print(Value & v)
     {
+        attrsPrinted = 0;
+        listItemsPrinted = 0;
+
         if (options.trackRepeated) {
             seen.emplace();
         } else {

@@ -501,4 +505,17 @@ void printValue(EvalState & state, std::ostream & output, Value & v, PrintOption
     Printer(output, state, options).print(v);
 }

+std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer)
+{
+    printValue(printer.state, output, printer.value, printer.options);
+    return output;
+}
+
+template<>
+HintFmt & HintFmt::operator%(const ValuePrinter & value)
+{
+    fmt % value;
+    return *this;
+}
+
 }
@@ -9,11 +9,14 @@

 #include <iostream>

-#include "eval.hh"
+#include "fmt.hh"
 #include "print-options.hh"

 namespace nix {

+class EvalState;
+struct Value;
+
 /**
  * Print a string as a Nix string literal.
  *

@@ -59,4 +62,30 @@ std::ostream & printIdentifier(std::ostream & o, std::string_view s);

 void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {});

+/**
+ * A partially-applied form of `printValue` which can be formatted using `<<`
+ * without allocating an intermediate string.
+ */
+class ValuePrinter {
+    friend std::ostream & operator << (std::ostream & output, const ValuePrinter & printer);
+private:
+    EvalState & state;
+    Value & value;
+    PrintOptions options;
+
+public:
+    ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions {})
+        : state(state), value(value), options(options) { }
+};
+
+std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer);
+
+
+/**
+ * `ValuePrinter` does its own ANSI formatting, so we don't color it
+ * magenta.
+ */
+template<>
+HintFmt & HintFmt::operator%(const ValuePrinter & value);
+
 }
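A ValuePrinter can therefore be streamed directly, or interpolated into a HintFmt-based error message, without first rendering the value to a string. A sketch, assuming state, v and pos are in scope:

    // Stream a value for debugging.
    std::cerr << ValuePrinter(state, v, errorPrintOptions) << "\n";

    // Embed a (possibly large) value in an error message; errorPrintOptions
    // keeps the rendered output bounded.
    state.error<TypeError>("expected a string but found %s: %s",
        showType(v),
        ValuePrinter(state, v, errorPrintOptions))
        .atPos(pos)
        .debugThrow();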
@@ -64,7 +64,7 @@ json printValueAsJSON(EvalState & state, bool strict,
                 out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
             } catch (Error & e) {
                 e.addTrace(state.positions[a.pos],
-                    hintfmt("while evaluating attribute '%1%'", j));
+                    HintFmt("while evaluating attribute '%1%'", j));
                 throw;
             }
         }

@@ -80,8 +80,8 @@ json printValueAsJSON(EvalState & state, bool strict,
             try {
                 out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
             } catch (Error & e) {
-                e.addTrace({},
-                    hintfmt("while evaluating list element at index %1%", i));
+                e.addTrace(state.positions[pos],
+                    HintFmt("while evaluating list element at index %1%", i));
                 throw;
             }
             i++;

@@ -99,13 +99,12 @@ json printValueAsJSON(EvalState & state, bool strict,

     case nThunk:
     case nFunction:
-        auto e = TypeError({
-            .msg = hintfmt("cannot convert %1% to JSON", showType(v)),
-            .errPos = state.positions[v.determinePos(pos)]
-        });
-        e.addTrace(state.positions[pos], hintfmt("message for the trace"));
-        state.debugThrowLastTrace(e);
-        throw e;
+        state.error<TypeError>(
+            "cannot convert %1% to JSON",
+            showType(v)
+        )
+        .atPos(v.determinePos(pos))
+        .debugThrow();
     }
     return out;
 }

@@ -119,7 +118,8 @@ void printValueAsJSON(EvalState & state, bool strict,
 json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
     NixStringContext & context, bool copyToStore) const
 {
-    state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType()));
+    state.error<TypeError>("cannot convert %1% to JSON", showType())
+        .debugThrow();
 }

@@ -105,7 +105,7 @@ class ExternalValueBase
      * Coerce the value to a string. Defaults to uncoercable, i.e. throws an
      * error.
      */
-    virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;
+    virtual std::string coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;

     /**
      * Compare to another value of the same type. Defaults to uncomparable,

@@ -19,8 +19,8 @@ public:
         : Error("")
     {
         raw = raw_;
-        auto hf = hintfmt(args...);
-        err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw);
+        auto hf = HintFmt(args...);
+        err.msg = HintFmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw);
     }
 };

@@ -5,26 +5,26 @@ namespace nix {
 std::string FilteringInputAccessor::readFile(const CanonPath & path)
 {
     checkAccess(path);
-    return next->readFile(prefix + path);
+    return next->readFile(prefix / path);
 }

 bool FilteringInputAccessor::pathExists(const CanonPath & path)
 {
-    return isAllowed(path) && next->pathExists(prefix + path);
+    return isAllowed(path) && next->pathExists(prefix / path);
 }

 std::optional<InputAccessor::Stat> FilteringInputAccessor::maybeLstat(const CanonPath & path)
 {
     checkAccess(path);
-    return next->maybeLstat(prefix + path);
+    return next->maybeLstat(prefix / path);
 }

 InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path)
 {
     checkAccess(path);
     DirEntries entries;
-    for (auto & entry : next->readDirectory(prefix + path)) {
-        if (isAllowed(path + entry.first))
+    for (auto & entry : next->readDirectory(prefix / path)) {
+        if (isAllowed(path / entry.first))
             entries.insert(std::move(entry));
     }
     return entries;

@@ -33,12 +33,12 @@ InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath
 std::string FilteringInputAccessor::readLink(const CanonPath & path)
 {
     checkAccess(path);
-    return next->readLink(prefix + path);
+    return next->readLink(prefix / path);
 }

 std::string FilteringInputAccessor::showPath(const CanonPath & path)
 {
-    return next->showPath(prefix + path);
+    return next->showPath(prefix / path);
 }

 void FilteringInputAccessor::checkAccess(const CanonPath & path)
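The operator change above is purely notational: CanonPath composition is now spelled with /, which (as used in these hunks) accepts either another canonical path or a single string component. A small sketch with placeholder paths:

    CanonPath prefix("/src/repo");
    CanonPath file("lib/default.nix");

    auto inRepo = prefix / file;            // compose two canonical paths
    auto lockFile = prefix / "flake.lock";  // append a single component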
@@ -6,72 +6,30 @@ namespace nix {

 struct FSInputAccessor : InputAccessor, PosixSourceAccessor
 {
-    CanonPath root;
-
-    FSInputAccessor(const CanonPath & root)
-        : root(root)
-    {
-        displayPrefix = root.isRoot() ? "" : root.abs();
-    }
-
-    void readFile(
-        const CanonPath & path,
-        Sink & sink,
-        std::function<void(uint64_t)> sizeCallback) override
-    {
-        auto absPath = makeAbsPath(path);
-        PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
-    }
-
-    bool pathExists(const CanonPath & path) override
-    {
-        return PosixSourceAccessor::pathExists(makeAbsPath(path));
-    }
-
-    std::optional<Stat> maybeLstat(const CanonPath & path) override
-    {
-        return PosixSourceAccessor::maybeLstat(makeAbsPath(path));
-    }
-
-    DirEntries readDirectory(const CanonPath & path) override
-    {
-        DirEntries res;
-        for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path)))
-            res.emplace(entry);
-        return res;
-    }
-
-    std::string readLink(const CanonPath & path) override
-    {
-        return PosixSourceAccessor::readLink(makeAbsPath(path));
-    }
-
-    CanonPath makeAbsPath(const CanonPath & path)
-    {
-        return root + path;
-    }
-
-    std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
-    {
-        return makeAbsPath(path);
-    }
+    using PosixSourceAccessor::PosixSourceAccessor;
 };

-ref<InputAccessor> makeFSInputAccessor(const CanonPath & root)
+ref<InputAccessor> makeFSInputAccessor()
 {
-    return make_ref<FSInputAccessor>(root);
+    return make_ref<FSInputAccessor>();
+}
+
+ref<InputAccessor> makeFSInputAccessor(std::filesystem::path root)
+{
+    return make_ref<FSInputAccessor>(std::move(root));
 }

 ref<InputAccessor> makeStorePathAccessor(
     ref<Store> store,
     const StorePath & storePath)
 {
-    return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)));
+    // FIXME: should use `store->getFSAccessor()`
+    return makeFSInputAccessor(std::filesystem::path { store->toRealPath(storePath) });
 }

 SourcePath getUnfilteredRootPath(CanonPath path)
 {
-    static auto rootFS = makeFSInputAccessor(CanonPath::root);
+    static auto rootFS = makeFSInputAccessor();
     return {rootFS, path};
 }

@@ -8,8 +8,9 @@ namespace nix {
 class StorePath;
 class Store;

-ref<InputAccessor> makeFSInputAccessor(
-    const CanonPath & root);
+ref<InputAccessor> makeFSInputAccessor();
+
+ref<InputAccessor> makeFSInputAccessor(std::filesystem::path root);

 ref<InputAccessor> makeStorePathAccessor(
     ref<Store> store,
@@ -139,15 +139,16 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type)

 struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
 {
-    CanonPath path;
+    /** Location of the repository on disk. */
+    std::filesystem::path path;
     Repository repo;

-    GitRepoImpl(CanonPath _path, bool create, bool bare)
+    GitRepoImpl(std::filesystem::path _path, bool create, bool bare)
         : path(std::move(_path))
     {
         initLibGit2();

-        if (pathExists(path.abs())) {
+        if (pathExists(path.native())) {
             if (git_repository_open(Setter(repo), path.c_str()))
                 throw Error("opening Git repository '%s': %s", path, git_error_last()->message);
         } else {

@@ -220,10 +221,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
         return toHash(*oid);
     }

-    std::vector<Submodule> parseSubmodules(const CanonPath & configFile)
+    std::vector<Submodule> parseSubmodules(const std::filesystem::path & configFile)
     {
         GitConfig config;
-        if (git_config_open_ondisk(Setter(config), configFile.abs().c_str()))
+        if (git_config_open_ondisk(Setter(config), configFile.c_str()))
             throw Error("parsing .gitmodules file: %s", git_error_last()->message);

         ConfigIterator it;

@@ -294,8 +295,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
             throw Error("getting working directory status: %s", git_error_last()->message);

         /* Get submodule info. */
-        auto modulesFile = path + ".gitmodules";
-        if (pathExists(modulesFile.abs()))
+        auto modulesFile = path / ".gitmodules";
+        if (pathExists(modulesFile))
             info.submodules = parseSubmodules(modulesFile);

         return info;

@@ -382,27 +383,27 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
     {
         Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url));

-        Remote remote;
-
-        if (git_remote_create_anonymous(Setter(remote), *this, url.c_str()))
-            throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message);
-
-        char * refspecs[] = {(char *) refspec.c_str()};
-        git_strarray refspecs2 {
-            .strings = refspecs,
-            .count = 1
-        };
-
-        git_fetch_options opts = GIT_FETCH_OPTIONS_INIT;
-        // FIXME: for some reason, shallow fetching over ssh barfs
-        // with "could not read from remote repository".
-        opts.depth = shallow && parseURL(url).scheme != "ssh" ? 1 : GIT_FETCH_DEPTH_FULL;
-        opts.callbacks.payload = &act;
-        opts.callbacks.sideband_progress = sidebandProgressCallback;
-        opts.callbacks.transfer_progress = transferProgressCallback;
-
-        if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr))
-            throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message);
+        // TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that)
+        // then use code that was removed in this commit (see blame)
+
+        auto dir = this->path;
+        Strings gitArgs;
+        if (shallow) {
+            gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
+        }
+        else {
+            gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--", url, refspec };
+        }
+
+        runProgram(RunOptions {
+            .program = "git",
+            .searchPath = true,
+            // FIXME: git stderr messes up our progress indicator, so
+            // we're using --quiet for now. Should process its stderr.
+            .args = gitArgs,
+            .input = {},
+            .isInteractive = true
+        });
     }

     void verifyCommit(

@@ -437,7 +438,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
             .args = {
                 "-c",
                 "gpg.ssh.allowedSignersFile=" + allowedSignersFile,
-                "-C", path.abs(),
+                "-C", path,
                 "verify-commit",
                 rev.gitRev()
             },

@@ -464,7 +465,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
     }
 };

-ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)
+ref<GitRepo> GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare)
 {
     return make_ref<GitRepoImpl>(path, create, bare);
 }

@@ -780,7 +781,7 @@ std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules

     auto rawAccessor = getRawAccessor(rev);

-    for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) {
+    for (auto & submodule : parseSubmodules(pathTemp)) {
         auto rev = rawAccessor->getSubmoduleRev(submodule.path);
         result.push_back({std::move(submodule), rev});
     }
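GitRepo::openRepo now takes a std::filesystem::path rather than a CanonPath, so callers pass the on-disk location directly. A sketch, with a placeholder repository path; the getWorkdirRef call mirrors the callers shown further down in this diff:

    auto repo = GitRepo::openRepo(std::filesystem::path("/home/user/example-repo"), false, false);
    auto head = repo->getWorkdirRef();   // optional ref name, per the callers below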
@@ -12,7 +12,7 @@ struct GitRepo
     virtual ~GitRepo()
     { }

-    static ref<GitRepo> openRepo(const CanonPath & path, bool create = false, bool bare = false);
+    static ref<GitRepo> openRepo(const std::filesystem::path & path, bool create = false, bool bare = false);

     virtual uint64_t getRevCount(const Hash & rev) = 0;

@@ -50,10 +50,12 @@ bool touchCacheFile(const Path & path, time_t touch_time)
     return lutimes(path.c_str(), times) == 0;
 }

-Path getCachePath(std::string_view key)
+Path getCachePath(std::string_view key, bool shallow)
 {
-    return getCacheDir() + "/nix/gitv3/" +
-        hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
+    return getCacheDir()
+        + "/nix/gitv3/"
+        + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false)
+        + (shallow ? "-shallow" : "");
 }

 // Returns the name of the HEAD branch.

@@ -92,7 +94,8 @@ std::optional<std::string> readHead(const Path & path)
 // Persist the HEAD ref from the remote repo in the local cached repo.
 bool storeCachedHead(const std::string & actualUrl, const std::string & headRef)
 {
-    Path cacheDir = getCachePath(actualUrl);
+    // set shallow=false as HEAD will never be queried for a shallow repo
+    Path cacheDir = getCachePath(actualUrl, false);
     try {
         runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef });
     } catch (ExecError &e) {

@@ -107,7 +110,8 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl)
 {
     // Create a cache path to store the branch of the HEAD ref. Append something
     // in front of the URL to prevent collision with the repository itself.
-    Path cacheDir = getCachePath(actualUrl);
+    // set shallow=false as HEAD will never be queried for a shallow repo
+    Path cacheDir = getCachePath(actualUrl, false);
     Path headRefFile = cacheDir + "/HEAD";

     time_t now = time(0);

@@ -315,7 +319,7 @@ struct GitInputScheme : InputScheme
         if (!repoInfo.isLocal)
             throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());

-        writeFile((CanonPath(repoInfo.url) + path).abs(), contents);
+        writeFile((CanonPath(repoInfo.url) / path).abs(), contents);

         auto result = runProgram(RunOptions {
             .program = "git",

@@ -411,7 +415,7 @@ struct GitInputScheme : InputScheme
         // If this is a local directory and no ref or revision is
         // given, then allow the use of an unclean working tree.
         if (!input.getRef() && !input.getRev() && repoInfo.isLocal)
-            repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo();
+            repoInfo.workdirInfo = GitRepo::openRepo(repoInfo.url)->getWorkdirInfo();

         return repoInfo;
     }

@@ -425,7 +429,7 @@ struct GitInputScheme : InputScheme
         if (auto res = cache->lookup(key))
             return getIntAttr(*res, "lastModified");

-        auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev);
+        auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev);

         cache->upsert(key, Attrs{{"lastModified", lastModified}});

@@ -443,7 +447,7 @@ struct GitInputScheme : InputScheme

         Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url));

-        auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev);
+        auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev);

         cache->upsert(key, Attrs{{"revCount", revCount}});

@@ -453,7 +457,7 @@ struct GitInputScheme : InputScheme
     std::string getDefaultRef(const RepoInfo & repoInfo) const
     {
         auto head = repoInfo.isLocal
-            ? GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef()
+            ? GitRepo::openRepo(repoInfo.url)->getWorkdirRef()
             : readHeadCached(repoInfo.url);
         if (!head) {
             warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url);

@@ -506,16 +510,16 @@ struct GitInputScheme : InputScheme
         if (repoInfo.isLocal) {
             repoDir = repoInfo.url;
             if (!input.getRev())
-                input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev());
+                input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir)->resolveRef(ref).gitRev());
         } else {
-            Path cacheDir = getCachePath(repoInfo.url);
+            Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input));
             repoDir = cacheDir;
             repoInfo.gitDir = ".";

             createDirs(dirOf(cacheDir));
             PathLocks cacheDirLock({cacheDir});

-            auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true);
+            auto repo = GitRepo::openRepo(cacheDir, true, true);

             Path localRefFile =
                 ref.compare(0, 5, "refs/") == 0

@@ -584,7 +588,7 @@ struct GitInputScheme : InputScheme
             // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
         }

-        auto repo = GitRepo::openRepo(CanonPath(repoDir));
+        auto repo = GitRepo::openRepo(repoDir);

         auto isShallow = repo->isShallow();

@@ -660,7 +664,7 @@ struct GitInputScheme : InputScheme
         for (auto & submodule : repoInfo.workdirInfo.submodules)
            repoInfo.workdirInfo.files.insert(submodule.path);

-        auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false);
+        auto repo = GitRepo::openRepo(repoInfo.url, false, false);

         auto exportIgnore = getExportIgnoreAttr(input);

@@ -676,7 +680,7 @@ struct GitInputScheme : InputScheme
         std::map<CanonPath, nix::ref<InputAccessor>> mounts;

         for (auto & submodule : repoInfo.workdirInfo.submodules) {
-            auto submodulePath = CanonPath(repoInfo.url) + submodule.path;
+            auto submodulePath = CanonPath(repoInfo.url) / submodule.path;
             fetchers::Attrs attrs;
             attrs.insert_or_assign("type", "git");
             attrs.insert_or_assign("url", submodulePath.abs());

@@ -699,7 +703,7 @@ struct GitInputScheme : InputScheme
         }

         if (!repoInfo.workdirInfo.isDirty) {
-            auto repo = GitRepo::openRepo(CanonPath(repoInfo.url));
+            auto repo = GitRepo::openRepo(repoInfo.url);

             if (auto ref = repo->getWorkdirRef())
                 input.attrs.insert_or_assign("ref", *ref);
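getCachePath now keys the local cache on whether the fetch is shallow, so shallow and full clones of the same URL no longer share a directory. A sketch with a placeholder URL:

    Path full    = getCachePath("https://example.org/repo.git", false);
    Path shallow = getCachePath("https://example.org/repo.git", true);   // same hash, "-shallow" suffix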